Esempio n. 1
0
def get_status_workspace():
    """
    Get status of working copy, return tuple of
    (changed_paths, new_paths, deleted_paths).
    """
    workspace_paths = set()
    for root, dirs, files in os.walk('.'):
        # never descend into the .git directory
        dirs[:] = [d for d in dirs if d != '.git']
        for filename in files:
            rel_path = os.path.join(root, filename).replace('\\', '/')
            if rel_path.startswith('./'):
                rel_path = rel_path[2:]
            workspace_paths.add(rel_path)

    index_by_path = {entry.path: entry for entry in read_index()}
    indexed_paths = set(index_by_path)

    def _is_modified(p):
        # changed means the working-copy blob hash no longer matches
        # the sha1 recorded in the index entry
        return (hash_object(read_file(p), 'blob', write=False)
                != index_by_path[p].sha1.hex())

    changed = {p for p in workspace_paths & indexed_paths if _is_modified(p)}
    new = workspace_paths - indexed_paths
    deleted = indexed_paths - workspace_paths
    return (sorted(changed), sorted(new), sorted(deleted))
Esempio n. 2
0
def createBranch(command, newBranchName):
    """
    This function creates a new branch head named <name> which points to the current HEAD.

    command arg is used to distinguish if it has been called by checkout or branch, since it behaves
    a bit differently.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    new_ref_path = '{}/.git/refs/heads/{}'.format(repo_root_path, newBranchName)

    # refuse to clobber an existing branch
    if os.path.isfile(new_ref_path):
        print('fatal: A branch named {} already exists.'.format(newBranchName))
        exit(1)

    head_content = read_file('{}/.git/HEAD'.format(repo_root_path)).decode(
        "utf-8")
    currentHeadRef = head_content.split('ref:')[1].strip()

    current_ref_file = '{}/.git/{}'.format(repo_root_path, currentHeadRef)
    # check whether the ref file under refs/heads/ exists
    if os.path.isfile(current_ref_file):
        # copy the commit hash the current branch points to into the new ref
        commitHash = read_file(current_ref_file).decode("utf-8")
        write_file(new_ref_path, commitHash, binary='')
    elif command == 'branch':
        # plain `git branch` on an unborn HEAD is an error
        print('fatal: Not a valid object name: \'{}\'.'.format(
            currentHeadRef.split('/')[-1]))
        exit(1)

    if command == 'checkout':
        # git switch / checkout additionally moves HEAD to the new branch
        update_HEAD(repo_root_path, newBranchName)
        print('Switched to a new branch \'{}\''.format(newBranchName))
Esempio n. 3
0
def get_active_branch():
    """
    Returns the branch name the HEAD is pointing to.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)
    head_data = read_file('{}/.git/HEAD'.format(repo_root_path)).decode("utf-8")
    ref_target = head_data.split('ref:')[1].strip()
    # the branch name is the last component of e.g. refs/heads/<name>
    return ref_target.split('/')[-1]
Esempio n. 4
0
def read_object(sha1_prefix):
    """Read object with given SHA-1 prefix and return tuple of
    (object_type, data_bytes), or raise ValueError if not found.
    """
    raw = zlib.decompress(read_file(find_object(sha1_prefix)))
    # object layout: b"<type> <size>\x00<payload>"
    nul = raw.index(b'\x00')
    obj_type, size_str = raw[:nul].decode().split()
    data = raw[nul + 1:]
    expected = int(size_str)
    assert expected == len(data), 'expected size {}, got {} bytes'.format(
            expected, len(data))
    return (obj_type, data)
Esempio n. 5
0
def listBranches(remotes):
    """
    Print a sorted list of branch names, one per line.

    Parameters:
        remotes: if truthy, remote-tracking branch names are parsed out of
            the .git/packed-refs file; otherwise local branch names are
            listed from the .git/refs/heads directory.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    # if remotes flag is set, read remote branches from packed-refs file
    if remotes:
        packed_refs_content = read_file(
            '{}/.git/packed-refs'.format(repo_root_path)).decode('utf-8')
        # raw string: the old pattern used invalid escape sequences (\/)
        branches = re.findall(r'refs/remotes/origin/(\w*)',
                              packed_refs_content)
    else:
        branches = list_files_in_dir(
            '{}/.git/refs/heads'.format(repo_root_path))

    branches.sort()
    result = ''
    for branch in branches:
        result += '* {}\n'.format(branch)
    print(result)
Esempio n. 6
0
def read_index() -> List[IndexEntry]:
    """
    Read git index file and return list of IndexEntry objects.

    Returns:
        List of IndexEntry objects. If no index file is found, an empty list is returned.

    Raises:
        NoRepositoryError: If no git repository is found.
    """
    try:
        repo_root_path = get_repo_root_path()
        data = read_file(os.path.join(repo_root_path, '.git', 'index'))
    except FileNotFoundError:
        # no index file yet (e.g. fresh repository) -> nothing staged
        return []
    except NoRepositoryError as nre:
        raise NoRepositoryError(nre)
    # the last 20 bytes of the index are a SHA-1 checksum over everything before
    digest = hashlib.sha1(data[:-20]).digest()
    assert digest == data[-20:], 'invalid index checksum'
    # 12-byte header: 4-byte signature "DIRC", 4-byte version, 4-byte entry count
    signature, version, num_entries = struct.unpack('!4sLL', data[:12])
    assert signature == b'DIRC', \
            'invalid index signature {}'.format(signature)
    assert version == 2, 'unknown index version {}'.format(version)
    # entries live between the header and the trailing checksum
    entry_data = data[12:-20]
    entries = []
    i = 0
    while i + 62 < len(entry_data):
        # each entry: 62 bytes of fixed-width fields, then a NUL-terminated path
        fields_end = i + 62
        fields = struct.unpack('!LLLLLLLLLL20sH', entry_data[i:fields_end])
        path_end = entry_data.index(b'\x00', fields_end)
        path = entry_data[fields_end:path_end]
        entry = IndexEntry(*(fields + (path.decode(), )))
        entries.append(entry)
        # entries are NUL-padded so their total length is a multiple of 8
        entry_len = ((62 + len(path) + 8) // 8) * 8
        i += entry_len
    assert len(entries) == num_entries
    return entries
Esempio n. 7
0
def unpack_object(object_hash, repo_path, path_to_write):
    """
    Takes a tree sha1 hash, reads the local object, and writes the content of
    its blob entries into the repository. Subtree entries are handled by a
    recursive call.
    """
    #TODO: have to make it more robust. What if it is not a tree object?
    from .gitTree import read_tree

    for entry in read_tree(object_hash):
        mode, name, sha1 = entry[0], entry[1], entry[2]
        if mode == GIT_NORMAL_FILE_MODE:
            object_path = os.path.join(repo_path, '.git/objects', sha1[:2], sha1[2:])
            raw = zlib.decompress(read_file(object_path))
            # the blob payload starts right after the NUL-terminated header
            payload = raw[raw.index(b'\x00') + 1:]
            target_path = os.path.join(path_to_write, name)
            if not os.path.exists(target_path):
                os.makedirs(os.path.dirname(target_path), exist_ok=True)
            write_file(target_path, payload)
        elif mode == GIT_TREE_MODE:
            unpack_object(sha1, repo_path, os.path.join(path_to_write, name))
Esempio n. 8
0
def merge(source_branch):
    """
    Merges two branches. If the source_branch parameter is set, the source
    branch is merged into the current branch. If the parameter is not set,
    a merge from FETCH_HEAD is performed.

    Uses a fast-forward when local history is contained in the remote one,
    otherwise a 3-way merge against the first common ancestor. On conflict,
    ORIG_HEAD/MERGE_HEAD/MERGE_MODE/MERGE_MSG are written and no commit is
    created.
    """
    had_conflict = False
    repo_root_path = get_repo_root_path()
    # if no source branch for merge is given, we go through the FETCH_HEAD file
    if source_branch is None:
        fetch_head_path = os.path.join(repo_root_path, '.git/FETCH_HEAD')
        if not os.path.exists(fetch_head_path):
            print('Nothing to merge. Have you called fetch before?')
            return
        fetch_head_content = read_file(fetch_head_path).decode('utf-8')

        findings = re.findall(r'^([ABCDEFabcdef0-9]+)\s+branch (\w|\')+',
                              fetch_head_content)
        if len(findings) == 0:
            remote_sha1 = None
        else:
            remote_sha1 = findings[0][0]
    else:
        # otherwise we are looking for the refs file first.
        source_branch_head_path = os.path.join(repo_root_path,
                                               '.git/refs/heads/',
                                               source_branch)
        if not os.path.exists(source_branch_head_path):
            # git doesn't use the FETCH_HEAD file when a branch name is given,
            # so we fall back to the packed-refs file
            packed_refs_path = os.path.join(repo_root_path, '.git/packed-refs')
            if not os.path.exists(packed_refs_path):
                # BUGFIX: previously execution fell through and read the
                # missing packed-refs file, raising FileNotFoundError
                remote_sha1 = None
            else:
                packed_refs_content = read_file(packed_refs_path).decode(
                    'utf-8')
                # read the commit hash for the requested branch
                findings = re.findall(
                    r'([ABCDEFabcdef0-9]*) refs/remotes/origin/{}'.format(
                        source_branch), packed_refs_content)
                if len(findings) == 0:
                    remote_sha1 = None
                else:
                    remote_sha1 = findings[0]
        else:
            # if the refs file exists, we read the sha1 from it
            remote_sha1 = read_file(source_branch_head_path).decode('utf-8')

    if remote_sha1 is None:
        print('merge: {} - not something we can merge'.format(source_branch))
        exit(1)

    activeBranch = get_current_branch_name()
    local_sha1 = get_active_branch_hash()

    remote_sha1 = remote_sha1.strip()
    local_sha1 = local_sha1.strip()

    if remote_sha1 == local_sha1:
        # both sides point at the same commit: nothing to merge
        return
    remote_commits = get_all_local_commits(remote_sha1)
    local_commits = get_all_local_commits(local_sha1)

    difference = set(local_commits) - set(remote_commits)

    if len(difference) == 0:
        # fast forward strategy: local history is a prefix of the remote one
        path = os.path.join(repo_root_path,
                            '.git/refs/heads/{}'.format(activeBranch))
        write_file(path, "{}\n".format(remote_sha1).encode())
        obj_type, commit_data = read_object(remote_sha1.strip())
        tree_sha1 = commit_data.decode().splitlines()[0][5:45]
        unpack_object(tree_sha1, repo_root_path, repo_root_path)
        return

    # non fast forward strategy: find the first common ancestor commit
    intersection = set(local_commits).intersection(remote_commits)
    for commit_hash in remote_commits:
        if commit_hash in intersection:
            ancestor = commit_hash
            break

    # We need to find an ancestor and run 3-way merge on these files!
    # then we need to create a new tree and a commit object with 2 parents

    obj_type, ancestor_commit = read_object(ancestor)
    obj_type, a_commit = read_object(local_commits[0])
    obj_type, b_commit = read_object(remote_commits[0])
    # entry lists for the 3 revisions in format [(filename, sha1), ...]
    ancestor_entries = []
    a_entries = []
    b_entries = []
    get_subtree_entries(ancestor_commit.splitlines()[0][5:45].decode(), '',
                        ancestor_entries)
    get_subtree_entries(a_commit.splitlines()[0][5:45].decode(), '', a_entries)
    get_subtree_entries(b_commit.splitlines()[0][5:45].decode(), '', b_entries)

    # filename -> list of hashes [ancestor, local, remote]; renamed from
    # `merge`, which shadowed this function's own name
    merge_entries = {}
    for e in ancestor_entries:
        if e[0] not in merge_entries:
            merge_entries[e[0]] = [e[1]]

    for e in a_entries:
        if e[0] not in merge_entries:
            merge_entries[e[0]] = [None, e[1]]
        else:
            merge_entries[e[0]].append(e[1])

    for e in b_entries:
        if e[0] not in merge_entries:
            merge_entries[e[0]] = [None, None, e[1]]
        else:
            merge_entries[e[0]].append(e[1])

    # if all hashes are the same, there is nothing we have to do
    conflict_files = []
    for f in merge_entries:
        hashes = merge_entries[f]
        if len(hashes) == 2 and hashes[0] != hashes[1]:
            # only two entries: the remote branch does not have the file,
            # so we keep the local version in the working tree
            obj_type, data = read_object(hashes[1])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif hashes[0] is None and hashes[1] is None:
            # first two entries are None: the local repository does not have
            # the file, so we add it
            obj_type, data = read_object(hashes[2])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(set(hashes)) == 3:
            # all entries are different, so 3-way merge
            obj_type, base_data = read_object(hashes[0])
            obj_type, local_data = read_object(hashes[1])
            obj_type, remote_data = read_object(hashes[2])
            file_conflict, merged_lines = three_way_merge(
                base_data.decode().splitlines(),
                local_data.decode().splitlines(),
                remote_data.decode().splitlines(), "HEAD", hashes[2])
            # BUGFIX: accumulate instead of overwrite, so a later clean file
            # can no longer hide an earlier conflict
            had_conflict = had_conflict or file_conflict
            # writing the merged lines into the file
            with open(os.path.join(repo_root_path, f), 'w') as out_file:
                for line in merged_lines:
                    out_file.write('{}\n'.format(line))
            if file_conflict:
                # adding file to list, so that we don't add it to the index
                conflict_files.append(f)
                path = os.path.join(repo_root_path, '.git/ORIG_HEAD')
                write_file(path, '{}\n'.format(local_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_HEAD')
                # BUGFIX: previously referenced an undefined `fetch_head`
                # variable (NameError); the merged-in commit is remote_sha1
                write_file(path, '{}\n'.format(remote_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_MODE')
                write_file(path, b'')
                path = os.path.join(repo_root_path, '.git/MERGE_MSG')
                if os.path.exists(path):
                    # append file name to conflict list
                    # BUGFIX: the handle used to shadow the loop variable f
                    with open(path, 'a') as msg_file:
                        msg_file.write('# \t{}'.format(f))
                else:
                    repo_name = read_repo_name()
                    if not repo_name.startswith('location:'):
                        # Need to check if the return is handled by the calling function
                        print('.git/name file has an error. Exiting...')
                        return False
                    tmp = repo_name.split('location:')[1].split(':')
                    network = tmp[0].strip()
                    user_key = tmp[1].strip()
                    git_factory = get_factory_contract(network)

                    repository = git_factory.functions.getRepository(
                        user_key).call()
                    write_file(
                        path,
                        'Merge branch \'{}\' of {} into {}\n\n# Conflicts\n# \t{}\n'
                        .format(source_branch, repository[2], activeBranch,
                                f).encode())

    # adding all the files to the index. TODO: can be more efficient if we add it to the previous loop
    files_to_add = []
    pwd = os.getcwd()
    os.chdir(repo_root_path)
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git') and name not in conflict_files:
                files_to_add.append(os.path.join(path, name)[2:])
    os.chdir(pwd)
    add(files_to_add)
    # creating a commit object with two parents
    if not had_conflict:
        commit('Merging {} into {}'.format(source_branch, activeBranch),
               parent1=local_commits[0],
               parent2=remote_commits[0])
Esempio n. 9
0
def diff(staged):
    """Show diff of files changed (between index and working copy)."""
    # checks if there is a diff between index and HEAD
    if staged:
        # we don't use deleted for now, since we don't have git3 rm command
        changed, new, deleted = get_status_commit()

        # map each indexed path to its IndexEntry for sha1 lookup
        entries_by_path = {e.path: e for e in read_index()}
        local_sha1 = get_active_branch_hash()
        commit_entries = read_commit_entries(local_sha1)

        # treat newly staged files like changed ones for diff purposes
        changed.extend(new)

        for i, path in enumerate(changed):
            sha1 = entries_by_path[path].sha1.hex()
            obj_type, data = read_object(sha1)

            assert obj_type == 'blob'
            # content from file which is stored in .git/objects/
            index_lines = data.decode().splitlines()

            # if the path is not in the commit_entries dict, it means, that it is
            # available in the index but has not been committed yet
            if path in commit_entries:
                commit_path = path
                sha1 = commit_entries[path]
                obj_type, data = read_object(sha1)

                assert obj_type == 'blob'
                # content from file which is stored in .git/objects/
                commit_lines = data.decode().splitlines()
            else:
                # no committed version: diff against an empty file
                commit_path = '/dev/null'
                commit_lines = ''

            diff_lines = difflib.unified_diff(
                commit_lines,
                index_lines,
                '{} (commit)'.format(commit_path),
                '{} (index)'.format(path),
                lineterm='')
            for line in diff_lines:
                print(line)
            # separator line between diffs of consecutive files
            if i < len(changed) - 1:
                print('-' * 70)
    else:
        # Show difference between working tree and index file
        changed, _, deleted = get_status_workspace()
        # gets all entries from the index file and puts those into a dict
        # the path is the key and IndexEntry is the value
        entries_by_path = {e.path: e for e in read_index()}

        # deleted files are shown as a diff against /dev/null below
        changed.extend(deleted)

        for i, path in enumerate(changed):
            sha1 = entries_by_path[path].sha1.hex()
            obj_type, data = read_object(sha1)

            assert obj_type == 'blob'

            # content from file which is stored in .git/objects/
            index_lines = data.decode().splitlines()

            try:
                # content from file which is stored in the working directory
                working_lines = read_file(path).decode().splitlines()
                work_tree_path = path
            except FileNotFoundError:
                # when this part is triggered, it means that the file has been
                # deleted from the working tree
                working_lines = []
                work_tree_path = '/dev/null'

            diff_lines = difflib.unified_diff(
                index_lines,
                working_lines,
                '{} (index)'.format(path),
                '{} (working copy)'.format(work_tree_path),
                lineterm='')
            for line in diff_lines:
                print(line)
            # separator line between diffs of consecutive files
            if i < len(changed) - 1:
                print('-' * 70)
Esempio n. 10
0
def commit(message: str,
           author: str = None,
           parent1: str = None,
           parent2: str = None) -> str:
    """
    Commit the current state of the index to active branch with given message.
    Returns the hash of the commit object.

    Parameters:
        message (str): The message for the commit.
        author (str): The author of the commit.
        parent1 (str): The first parent of the commit.
        parent2 (str): The second parent of the commit.

    Returns:
        Return hash of commit object.
    """
    try:
        index = read_index()
        # build the tree object for the current index state
        tree = hash_object(b''.join(write_tree(index)), 'tree')
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    if parent1 is None:
        # even though get_active_branch_hash throws a NoRepositoryError,
        # we don't have to catch it, since we are doing it already further up
        # in the code. If there is no repository, we won't reach this line.
        parent = get_active_branch_hash()
    else:
        parent = parent1

    # check if there is a MERGE_HEAD file. If there is, parent2 is set to the sha1 hash
    merge_head_path = os.path.join(get_repo_root_path(), '.git', 'MERGE_HEAD')

    if os.path.exists(merge_head_path):
        parent2 = read_file(merge_head_path).decode().strip()

    if author is None:
        # get_value_from_config_file throws a NoRepositoryError
        # but the same as above, we don't have to catch it
        user_name = get_value_from_config_file('name')
        user_email = get_value_from_config_file('email')

        author = '{} <{}>'.format(user_name, user_email)

    timestamp = int(time.mktime(time.localtime()))
    utc_offset = -time.timezone

    # BUGFIX: use >= so a zero offset formats as +0000 (git convention),
    # not -0000
    author_time = '{} {}{:02}{:02}'.format(timestamp,
                                           '+' if utc_offset >= 0 else '-',
                                           abs(utc_offset) // 3600,
                                           (abs(utc_offset) // 60) % 60)

    # assemble the commit object line by line
    lines = ['tree ' + tree]
    if parent:
        lines.append('parent ' + parent)
    if parent2 is not None:
        lines.append('parent ' + parent2)
    lines.append('author {} {}'.format(author, author_time))
    lines.append('committer {} {}'.format(author, author_time))
    lines.append('')
    lines.append(message)
    lines.append('')
    data = '\n'.join(lines).encode()
    sha1 = hash_object(data, 'commit')

    repo_root_path = get_repo_root_path()
    activeBranch = get_current_branch_name()

    # point the active branch ref at the new commit
    branch_path = os.path.join(repo_root_path, '.git', 'refs', 'heads',
                               activeBranch)
    write_file(branch_path, (sha1 + '\n').encode())

    # remove the merge files from the .git directory if committed
    if parent2 is not None and os.path.exists(merge_head_path):
        os.remove(merge_head_path)
        merge_mode_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MODE')
        os.remove(merge_mode_path)
        merge_msg_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MSG')
        os.remove(merge_msg_path)

    #TODO: git returns the number of files added and changed. Would be good too
    print('[{} {}] {}'.format(activeBranch, sha1[:7], message))
    print('Author: {}'.format(author))
    return sha1
Esempio n. 11
0
def main():
    """Parse the command line and dispatch to the matching subcommand."""
    parser = argparse.ArgumentParser()
    sub_parsers = parser.add_subparsers(dest='command', metavar='command')
    sub_parsers.required = True

    # Add
    sub_parser = sub_parsers.add_parser('add', help='add file(s) to index')
    sub_parser.add_argument('paths',
                            nargs='+',
                            metavar='path',
                            help='path(s) of files to add')

    # Branch
    sub_parser = sub_parsers.add_parser('branch',
                                        help='List and Create branches')
    sub_parser.add_argument('-r',
                            '--remotes',
                            action='store_true',
                            help='act on remote-tracking branches')
    # BUGFIX: do not assign add_argument's return value (an argparse.Action)
    # back to sub_parser — that clobbered the parser variable
    sub_parser.add_argument(
        'branchname',
        metavar='<branchname>',
        nargs='?',
        help='Create a new branch named <branchname>')

    # Cat-file
    sub_parser = sub_parsers.add_parser('cat-file',
                                        help='display contents of object')
    valid_modes = ['commit', 'tree', 'blob', 'size', 'type', 'pretty']
    sub_parser.add_argument(
        'mode',
        choices=valid_modes,
        help='object type (commit, tree, blob) or display mode (size, '
        'type, pretty)')
    sub_parser.add_argument(
        'hash_prefix', help='SHA-1 hash (or hash prefix) of object to display')

    # Checkout
    sub_parser = sub_parsers.add_parser('checkout', help='Switch branches')
    sub_parser.add_argument('-b',
                            action='store_true',
                            help='Create a new branch with name new_branch')
    sub_parser.add_argument('branch',
                            metavar='<branch>',
                            help='Checkout to <branch>')

    # Commit
    sub_parser = sub_parsers.add_parser(
        'commit',
        help='commit current state of index to current active branch')
    sub_parser.add_argument(
        '-a',
        '--author',
        help='commit author in format "A U Thor <*****@*****.**>" '
        '(uses GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL environment '
        'variables by default)')
    sub_parser.add_argument('-m',
                            '--message',
                            required=True,
                            help='text of commit message')

    # Create
    sub_parser = sub_parsers.add_parser('create',
                                        help='create your remote repository')
    valid_networks = ['godwoken', 'mumbai']
    sub_parser.add_argument(
        '-n',
        '--network',
        required=True,
        choices=valid_networks,
        help='Choose which network to interact with. Godwoken Testnet and'
        ' Mumbai are currently supported.')

    # Clone
    sub_parser = sub_parsers.add_parser('clone',
                                        help='create your remote repository')
    sub_parser.add_argument('name', help='name of repository to clone')

    # Diff
    sub_parser = sub_parsers.add_parser(
        'diff',
        help='show diff of files changed (between index and working '
        'copy)')
    sub_parser.add_argument(
        '--staged',
        action='store_true',
        help='This form is to view the changes you staged for the '
        'next commit relative to the HEAD commmit.')

    # Hash-object
    sub_parser = sub_parsers.add_parser(
        'hash-object',
        help='hash contents of given path (and optionally write to '
        'object store)')
    sub_parser.add_argument('path', help='path of file to hash')
    sub_parser.add_argument('-t',
                            choices=['commit', 'tree', 'blob'],
                            default='blob',
                            dest='type',
                            help='type of object (default %(default)r)')
    sub_parser.add_argument(
        '-w',
        action='store_true',
        dest='write',
        help='write object to object store (as well as printing hash)')

    # init
    sub_parser = sub_parsers.add_parser('init', help='initialize a new repo')
    sub_parser.add_argument('repo',
                            nargs='?',
                            default='.',
                            help='directory name for new repo')

    #sub_parser = sub_parsers.add_parser('ls-files',
    #help='list files in index')
    #sub_parser.add_argument('-s', '--stage', action='store_true',
    #help='show object details (mode, hash, and stage number) in '
    #'addition to path')

    # Fetch
    sub_parser = sub_parsers.add_parser(
        'fetch', help='Download object and refs from another repository')
    sub_parser.add_argument('branch', nargs='?', help='branch data to fetch')

    # Get-Address
    sub_parser = sub_parsers.add_parser('get-address',
                                        help='Get Matic wallet address')

    # Merge
    sub_parser = sub_parsers.add_parser(
        'merge', help='Join two or more development histories together')
    sub_parser.add_argument('sourceBranch',
                            nargs='?',
                            help='branch to be merged into the current branch')

    # Push
    sub_parser = sub_parsers.add_parser(
        'push', help='push current active branch to given git server URL')
    #sub_parser.add_argument('git_url',
    #        help='URL of git repo, eg: https://github.com/benhoyt/pygit.git')
    #sub_parser.add_argument('-p', '--password',
    #help='password to use for authentication (uses GIT_PASSWORD '
    #'environment variable by default)')
    #sub_parser.add_argument('-u', '--username',
    #help='username to use for authentication (uses GIT_USERNAME '
    #'environment variable by default)')

    # Pull
    sub_parser = sub_parsers.add_parser('pull', help='pulls remote commits')

    # Status
    sub_parser = sub_parsers.add_parser('status',
                                        help='show status of working copy')

    args = parser.parse_args()
    if args.command == 'add':
        add(args.paths)
    elif args.command == 'branch':
        if args.branchname:
            createBranch(args.command, args.branchname)
        else:
            listBranches(args.remotes)
    elif args.command == 'checkout':
        if args.b is False:
            checkout(args.branch)
        else:
            createBranch('checkout', args.branch)
    elif args.command == 'cat-file':
        try:
            cat_file(args.mode, args.hash_prefix)
        except ValueError as error:
            print(error, file=sys.stderr)
            sys.exit(1)
    elif args.command == 'commit':
        commit(args.message, author=args.author)
    elif args.command == 'create':
        create(args.network)
    elif args.command == 'clone':
        clone(args.name)
    elif args.command == 'diff':
        diff(args.staged)
    elif args.command == 'fetch':
        fetch(args.branch)
    elif args.command == 'get-address':
        address = getAddress()
        print('Your address is: {}'.format(address))
    elif args.command == 'hash-object':
        # NOTE(review): camelCase hashObject while hash_object is used
        # elsewhere in this file — confirm this helper exists
        hashObject(read_file(args.path), args.type, write=args.write)
    elif args.command == 'init':
        init(args.repo)
    elif args.command == 'ls-files':
        # NOTE(review): unreachable while the ls-files subparser above stays
        # commented out; args.stage would not exist if it were reached
        ls_files(details=args.stage)
    elif args.command == 'merge':
        merge(args.sourceBranch)
    elif args.command == 'push':
        push()
    elif args.command == 'pull':
        pull()
    elif args.command == 'status':
        status()
    else:
        assert False, 'unexpected command {!r}'.format(args.command)
Esempio n. 12
0
def checkout(branch):
    """
    Switch the working copy to <branch>.

    Resolves the branch's commit hash from .git/refs/heads, FETCH_HEAD or
    packed-refs (in that order), refuses to switch while the workspace or
    the index has uncommitted changes, and otherwise replaces the working
    tree with the target commit's content and repoints HEAD.

    Args:
        branch: Name of the branch to check out.
    """
    if branch is None:
        print('fatal: Branch name not given.')
        exit(1)
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    active_branch = get_active_branch()

    if active_branch == branch:
        print('Already on \'{}\''.format(branch))
        exit(0)

    # True when the commit hash was resolved via the packed-refs file; in
    # that case a loose ref is materialised under .git/refs/heads/ below.
    from_packed_refs = False
    target_commit_hash = None

    # Resolve the target commit hash: loose ref first ...
    if os.path.isfile('{}/.git/refs/heads/{}'.format(repo_root_path, branch)):
        target_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
            repo_root_path, branch)).decode("utf-8").strip()
    else:
        # ... then FETCH_HEAD ...
        if os.path.isfile('{}/.git/FETCH_HEAD'.format(repo_root_path)):
            fetch_head_content = read_file(
                '{}/.git/FETCH_HEAD'.format(repo_root_path)).decode('utf-8')
            # NOTE(review): if FETCH_HEAD exists but does not mention this
            # branch, the split below yields garbage instead of failing —
            # verify against the FETCH_HEAD writer.
            target_commit_hash = fetch_head_content.split(
                'branch \'{}\''.format(branch))[0].split('\n')[-1].split(
                    '\t')[0].strip()
        # ... then packed-refs.
        elif os.path.isfile('{}/.git/packed-refs'.format(repo_root_path)):
            packed_refs_content = read_file(
                '{}/.git/packed-refs'.format(repo_root_path)).decode("utf-8")
            if branch in packed_refs_content:
                from_packed_refs = True
                target_commit_hash = packed_refs_content.split(
                    'refs/remotes/origin/{}\n'.format(branch))[0].split(
                        '\n')[-1].strip()
        else:
            # Branch is unknown to every lookup source.
            print(
                'error: pathspec \'{}\' did not match any file(s) known to git3'
                .format(branch))
            exit(1)

    current_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
        repo_root_path, active_branch)).decode("utf-8").strip()

    # When the hash came from packed-refs, create the loose ref under
    # .git/refs/heads/<branch> so subsequent operations can find it.
    if from_packed_refs:
        print(
            'Branch \'{}\' set up to track remote branch \'{}\' from \'origin\'.'
            .format(branch, branch))
        write_file('{}/.git/refs/heads/{}'.format(repo_root_path, branch),
                   target_commit_hash,
                   binary='')

    if current_commit_hash == target_commit_hash:
        # Same commit: only HEAD needs to be repointed.
        # Bug fix: the original passed a stray second argument to format().
        write_file('{}/.git/HEAD'.format(repo_root_path),
                   'ref: refs/heads/{}'.format(branch),
                   binary='')
        exit(0)

    # Refuse to switch while there are uncommitted changes.
    # Bug fix: the original used "len(x) is not 0", which compares ints by
    # identity (unreliable, SyntaxWarning on Python >= 3.8).
    changed, new, deleted = get_status_workspace()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)

    changed, new, deleted = get_status_commit()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)

    commit_entries = read_commit_entries(target_commit_hash)

    # We delete all tracked files and rewrite the target commit's content.
    # Comparing per-file hashes would avoid needless writes; we would then
    # also need to handle files missing from the index. Possible future
    # improvement.
    remove_files_from_repo()

    files_to_add = []

    for filename in commit_entries:
        object_type, data = read_object(commit_entries[filename])
        assert object_type == 'blob'
        write_file('{}/{}'.format(repo_root_path, filename),
                   data.decode('utf-8'),
                   binary='')
        files_to_add.append(filename)

    # Rebuild the index from scratch for the checked-out files.
    os.remove('{}/.git/index'.format(repo_root_path))
    add(files_to_add)
    update_HEAD(repo_root_path, branch)
    print('Switched to branch \'{}\''.format(branch))
Esempio n. 13
0
def add(paths: List[str]) -> None:
    """
    Add all file paths to the git index.

    Args:
        paths: List of file paths to be added to the git index.

    Raises:
        NoRepositoryError: If no git repository is found.
        FileNotFoundError: If a file to be added to the index is not found.
    """
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    # Convert to paths relative to the repository root (we may be invoked
    # from a subdirectory). Bug fix: normalise Windows separators AFTER
    # relpath — os.path.relpath can reintroduce backslashes on Windows,
    # and index paths must use forward slashes.
    paths = [
        os.path.relpath(os.path.abspath(p), repo_root_path).replace('\\', '/')
        for p in paths
    ]

    all_entries = read_index()

    # Keep every existing entry that is not being (re-)added now.
    entries = [e for e in all_entries if e.path not in paths]
    for path in paths:
        file_path = repo_root_path + '/' + path
        try:
            data = read_file(file_path)
        except FileNotFoundError:
            print(
                'fatal: pathspec \'{}\' did not match any files'.format(path))
            exit(1)
        sha1 = hash_object(data, 'blob')
        st = os.stat(file_path)
        # TODO: derive the index mode properly. The spec packs 4 bits of
        # object type (1000 regular file, 1010 symlink, 1110 gitlink),
        # 3 unused bits and 9 bits of unix permission (0755/0644 for
        # regular files, 0 for symlinks/gitlinks). We currently store the
        # raw st_mode and assume a regular file.
        mode = st.st_mode
        # NOTE(review): this stores the basename length in the flags field;
        # git's index format uses the length of the full repo-relative path
        # — confirm against write_index/read_index before changing.
        flags = len(file_path.split('/')[-1].encode())
        # The name-length portion of the flags field is only 12 bits wide.
        assert flags < (1 << 12)
        # Repo-relative path recorded in the index entry.
        relative_path = os.path.relpath(os.path.abspath(file_path),
                                        repo_root_path)
        # st_ctime_ns % 1000000000 keeps only the nanosecond fraction.
        entry = IndexEntry(int(st.st_ctime), st.st_ctime_ns % 1000000000,
                           int(st.st_mtime), st.st_mtime_ns % 1000000000,
                           st.st_dev, st.st_ino,
                           mode, st.st_uid, st.st_gid, st.st_size,
                           bytes.fromhex(sha1), flags, relative_path)
        entries.append(entry)
    # The index file must stay sorted by path.
    entries.sort(key=operator.attrgetter('path'))
    write_index(entries)