Пример #1
0
def init(repo: str = '.'):
    """
    Create an empty git repository by creating a .git directory in the
    current or given directory.

    Args:
        repo (str): Path of the repository directory. If none is given,
            the current directory is used.

    Returns:
        bool: True if successful, False otherwise (e.g. when the
        repository already exists).
    """
    if os.path.exists(os.path.join(repo, '.git')):
        print(f"Repository {repo} exists already")
        return False

    cwd = os.getcwd()
    if repo != '.':
        os.mkdir(repo)
        repoName = repo
        fullPath = os.path.join(cwd, repo)
    else:
        # os.path.basename is portable, unlike splitting on '/'
        repoName = os.path.basename(cwd)
        fullPath = cwd

    os.mkdir(os.path.join(repo, '.git'))

    # create the necessary directory skeleton
    for name in ['objects', 'refs', 'refs/heads']:
        os.mkdir(os.path.join(repo, '.git', name))
    write_file(os.path.join(repo, '.git', 'HEAD'), b'ref: refs/heads/main')

    # write the name of the repository into a file
    write_file(os.path.join(repo, '.git', 'name'), str.encode('name: ' + repoName))

    print('Initialized empty Git3 repository in: {}/.git/'.format(fullPath))
    return True
Пример #2
0
def write_index(entries):
    """Write list of IndexEntry objects to the git index file.

    Builds a version-2 index: a 'DIRC' header, one fixed-size record plus
    NUL-padded path per entry (each record padded to a multiple of 8
    bytes), followed by a SHA-1 digest over all preceding data.

    Raises:
        NoRepositoryError: If no repository is found.
    """
    # Let the exception propagate untouched: the previous
    # `raise NoRepositoryError(nre)` wrapped the exception inside a new
    # one, mangling the original message and traceback.
    repo_root_path = get_repo_root_path()
    packed_entries = []

    for entry in entries:
        entry_head = struct.pack('!LLLLLLLLLL20sH', entry.ctime_s,
                                 entry.ctime_n, entry.mtime_s, entry.mtime_n,
                                 entry.dev, entry.ino & 0xFFFFFFFF, entry.mode,
                                 entry.uid, entry.gid, entry.size, entry.sha1,
                                 entry.flags)
        path = entry.path.encode()
        # from ctime to object name it is 62 bytes;
        # // (integer division) pads the record to a multiple of 8 bytes
        length = ((62 + len(path) + 8) // 8) * 8
        packed_entry = entry_head + path + b'\x00' * (length - 62 - len(path))
        packed_entries.append(packed_entry)
    header = struct.pack('!4sLL', b'DIRC', 2, len(entries))
    all_data = header + b''.join(packed_entries)
    digest = hashlib.sha1(all_data).digest()
    write_file(os.path.join(repo_root_path, '.git', 'index'),
               all_data + digest)
Пример #3
0
def hash_object(data: bytes, obj_type: str, write: bool = True) -> str:
    """
    Compute hash of object data of given type and write to object store if
    "write" is True. Return SHA-1 object hash as hex string.

    Parameters:
        data (bytes): data to be hashed
        obj_type (str): Type of object to be hashed (e.g. 'blob', 'tree',
            'commit')
        write (bool): Whether to write the result to the object store

    Returns:
        str: SHA-1 object hash as hex string

    Raises:
        NoRepositoryError: If no repository is found
    """
    # Let the exception propagate untouched: the previous
    # `raise NoRepositoryError(nre)` wrapped the exception inside a new
    # one, mangling the original message and traceback.
    repo_root_path = get_repo_root_path()
    # git object layout: b'<type> <size>\x00<content>'
    header = '{} {}'.format(obj_type, len(data)).encode()
    full_data = header + b'\x00' + data
    sha1 = hashlib.sha1(full_data).hexdigest()
    if write:
        # objects are sharded by the first two hex chars of the hash
        path = os.path.join(repo_root_path, '.git', 'objects', sha1[:2], sha1[2:])
        if not os.path.exists(path):
            os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, zlib.compress(full_data))
    return sha1
Пример #4
0
def create(network):
    """
    Create a remote repository on the given network.

    Reads the repository name from .git/name, sends a createRepository
    transaction to the git factory contract and, on success, rewrites
    .git/name to 'location: <network>:<user key hash>'.

    Parameters:
        network (str): Name of the network the repository is created on.
    """
    git_factory = get_factory_contract(network)
    repo_name = read_repo_name()
    # .git/name must still contain the 'name:' prefix written by init;
    # after creation it is replaced by a 'location:' entry
    if not repo_name.startswith('name:'):
        print(
            'The string in file .git/name is not correct. Exiting creation of remote'
        )
        return
    repo_name = repo_name.split('name:')[1].strip()

    w3 = get_web3_provider(network)

    if repo_name == '':
        print('There is no repository name.')
        return
    # #TODO: before creating tx and so on, check if this kind of repo exits already :)
    user_address = get_user_dlt_address()

    # nonce of the next transaction for the user's account
    nonce = w3.eth.get_transaction_count(user_address)

    print('User address', user_address)
    gas_price = get_current_gas_price(network)

    print('Preparing transaction to create repository {}'.format(repo_name))
    create_repo_tx = git_factory.functions.createRepository(
        repo_name).buildTransaction({
            'chainId': get_chain_id(network),
            'gas': 3947750,
            'gasPrice': w3.toWei(gas_price, 'gwei'),
            'nonce': nonce,
        })

    priv_key = bytes.fromhex(get_private_key())
    print('Signing transaction')
    signed_txn = w3.eth.account.sign_transaction(create_repo_tx,
                                                 private_key=priv_key)

    # send the raw transaction and block until it is mined
    print('Sending transaction')
    tx_hash = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)

    # #TODO: print a clickable link to blockexplorer
    print('Transaction hash {}'.format(
        binascii.hexlify(receipt['transactionHash']).decode()))
    if receipt['status']:
        print('Repository {:s} has been created'.format(repo_name))
        # going to replace the entry in the .git/name folder to location: <hash>
        user_key = git_factory.functions.getUserRepoNameHash(
            user_address, repo_name).call()
        user_key = '{}:0x{}'.format(network,
                                    binascii.hexlify(user_key).decode())
        #TODO: in case we are within a subdir of the repo, this is going to fail!
        write_file(os.path.join('.git', 'name'),
                   str.encode('location: ' + user_key))
    else:
        print('Creating {:s} repository failed'.format(repo_name))
Пример #5
0
def createBranch(command, newBranchName):
    """
    Create a new branch head named <newBranchName> which points to the
    current HEAD.

    The command argument distinguishes whether the call came from checkout
    or branch, since the behaviour differs slightly between the two.
    """
    try:
        root = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    new_ref_path = f'{root}/.git/refs/heads/{newBranchName}'

    # refuse to overwrite an existing branch
    if os.path.isfile(new_ref_path):
        print(f'fatal: A branch named {newBranchName} already exists.')
        exit(1)

    # resolve the symbolic ref stored in HEAD, e.g. 'refs/heads/main'
    head_content = read_file(f'{root}/.git/HEAD').decode("utf-8")
    current_head_ref = head_content.split('ref:')[1].strip()

    head_ref_file = f'{root}/.git/{current_head_ref}'
    if os.path.isfile(head_ref_file):
        # the current branch has a commit: copy its hash into the new ref
        commit_hash = read_file(head_ref_file).decode("utf-8")
        write_file(f'{root}/.git/refs/heads/{newBranchName}',
                   commit_hash,
                   binary='')
    elif command == 'branch':
        # git branch on an unborn HEAD is an error
        print('fatal: Not a valid object name: \'{}\'.'.format(
            current_head_ref.split('/')[-1]))
        exit(1)

    if command == 'checkout':
        # git switch / checkout additionally moves HEAD to the new branch
        update_HEAD(root, newBranchName)
        print(f'Switched to a new branch \'{newBranchName}\'')
Пример #6
0
def write_commit(commit_object, repo_name):
    """
    Reconstruct a local git commit object from a remote commit description
    and write it into .git/objects if it does not exist there yet.

    Parameters:
        commit_object (dict): remote commit data with 'tree' and 'parents'
            CIDs, 'author'/'committer' dicts, 'commit_message' and 'sha1'.
        repo_name (str): path to the repository the object is written into.
    """
    author = '{} {}'.format(commit_object['author']['name'],
                            commit_object['author']['email'])
    author_time = '{} {}'.format(commit_object['author']['date_seconds'],
                                 commit_object['author']['date_timestamp'])

    committer = '{} {}'.format(commit_object['committer']['name'],
                               commit_object['committer']['email'])
    committer_time = '{} {}'.format(
        commit_object['committer']['date_seconds'],
        commit_object['committer']['date_timestamp'])

    client = getStorageClient()
    tree_obj = client.get_json(commit_object['tree'])

    # assemble the textual commit object line by line
    # (the previously duplicated `lines = []` initialization was dead code)
    lines = ['tree ' + tree_obj['sha1']]
    if commit_object['parents']:
        for parent in commit_object['parents']:
            # parents are referenced by CID remotely; resolve each to sha1
            parent_obj = client.get_json(parent)
            lines.append('parent ' + parent_obj['sha1'])
    lines.append('author {} {}'.format(author, author_time))
    lines.append('committer {} {}'.format(committer, committer_time))
    lines.append('')
    lines.append(commit_object['commit_message'])
    lines.append('')
    data = '\n'.join(lines).encode()
    # git object layout: b'commit <size>\x00<content>'
    header = '{} {}'.format('commit', len(data)).encode()
    full_data = header + b'\x00' + data

    path = os.path.join(repo_name, '.git', 'objects',
                        commit_object['sha1'][:2], commit_object['sha1'][2:])
    if not os.path.exists(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
        write_file(path, zlib.compress(full_data))
Пример #7
0
def unpack_object(object_hash, repo_path, path_to_write):
    """
    Take a tree sha1 hash and read the local object. Iterate over the
    entries and write the content of blobs into the repository. When a
    nested tree object is encountered, recurse into it.

    Parameters:
        object_hash (str): sha1 of the tree object to unpack.
        repo_path (str): repository root containing .git/objects.
        path_to_write (str): directory blob contents are written into.
    """
    #TODO: have to make it more robust. What if it is not a tree object?
    from .gitTree import read_tree

    for mode, name, sha1 in read_tree(object_hash):
        if mode == GIT_NORMAL_FILE_MODE:
            object_path = os.path.join(repo_path, '.git/objects', sha1[:2], sha1[2:])
            full_data = zlib.decompress(read_file(object_path))
            # object layout: b'<type> <size>\x00<content>'; we only need the
            # content, so the header text itself is not parsed
            nul_index = full_data.index(b'\x00')
            data = full_data[nul_index + 1:]
            data_path = os.path.join(path_to_write, name)
            if not os.path.exists(data_path):
                os.makedirs(os.path.dirname(data_path), exist_ok=True)
            write_file(data_path, data)
        elif mode == GIT_TREE_MODE:
            unpack_object(sha1, repo_path, os.path.join(path_to_write, name))
Пример #8
0
def unpack_files_of_tree(repo_name, path_to_write, tree, unpack_blobs):
    """
    Gets a tree object and unpacks the references. The content of the blobs
    are written into a file if unpack_blobs is set true. Otherwise only the
    git objects are created.

    Parameters:
        repo_name (str): path to the repository (contains the .git dir).
        path_to_write (str): directory the blob contents are written into.
        tree (dict): remote tree object with 'entries' and 'sha1' keys.
        unpack_blobs (bool): whether to write blob contents to the worktree.
    """
    tree_entries = []
    client = getStorageClient()
    for entry in tree['entries']:
        if entry['mode'] == GIT_NORMAL_FILE_MODE:
            blob = client.get_json(entry['cid'])
            # write content to the file if wanted
            if unpack_blobs:
                path = os.path.join(path_to_write, entry['name'])
                if not os.path.exists(path):
                    os.makedirs(os.path.dirname(path), exist_ok=True)
                write_file(path, blob['content'].encode())
            # time to create blob object if doesn't exists yet
            # (git object layout: b'blob <size>\x00<content>', zlib-compressed)
            path = os.path.join(repo_name, '.git', 'objects', blob['sha1'][:2], blob['sha1'][2:])
            if not os.path.exists(path):
                header = '{} {}'.format('blob', len(blob['content'])).encode()
                full_data = header + b'\x00' + blob['content'].encode()
                os.makedirs(os.path.dirname(path), exist_ok=True)
                write_file(path, zlib.compress(full_data))
            # creating entry for tree object: '<octal mode> <name>\x00<raw sha1>'
            mode_path = '{:o} {}'.format(GIT_NORMAL_FILE_MODE, entry['name']).encode()
            tree_entry = mode_path + b'\x00' + binascii.unhexlify(blob['sha1'])
            tree_entries.append(tree_entry)
        elif entry['mode'] == GIT_TREE_MODE:
            sub_tree = client.get_json(entry['cid'])
            # recurse into the subdirectory first so its objects exist
            unpack_files_of_tree(repo_name, "{}/{}".format(path_to_write, entry['name']), sub_tree, unpack_blobs)
            mode_path = '{:o} {}'.format(GIT_TREE_MODE, entry['name']).encode()
            tree_entry = mode_path + b'\x00' + binascii.unhexlify(sub_tree['sha1'])
            tree_entries.append(tree_entry)

    # finally write the tree object itself into .git/objects
    data = b''.join(tree_entries)
    obj_type = 'tree'
    header = '{} {}'.format(obj_type, len(data)).encode()
    full_data = header + b'\x00' + data
    path = os.path.join(repo_name, '.git', 'objects', tree['sha1'][:2], tree['sha1'][2:])
    if not os.path.exists(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
        write_file(path, zlib.compress(full_data))
Пример #9
0
def merge(source_branch):
    """
    Merge two branches. If the source_branch parameter is set, the source
    branch is merged into the current branch. If the parameter is not set,
    a merge from FETCH_HEAD is performed.

    Parameters:
        source_branch (str | None): name of the branch to merge, or None
            to merge from FETCH_HEAD.
    """
    had_conflict = False
    repo_root_path = get_repo_root_path()
    # if no source branch for merge is given, we go through the FETCH_HEAD file
    if source_branch is None:
        fetch_head_path = os.path.join(repo_root_path, '.git/FETCH_HEAD')
        if not os.path.exists(fetch_head_path):
            print('Nothing to merge. Have you called fetch before?')
            return
        fetch_head_content = read_file(fetch_head_path).decode('utf-8')

        findings = re.findall(r'^([ABCDEFabcdef0-9]+)\s+branch (\w|\')+',
                              fetch_head_content)
        if len(findings) == 0:
            remote_sha1 = None
        else:
            remote_sha1 = findings[0][0]
    else:
        # otherwise we are looking for the refs file first.
        source_branch_head_path = os.path.join(repo_root_path,
                                               '.git/refs/heads/',
                                               source_branch)
        if not os.path.exists(source_branch_head_path):
            # if the refs file does not exist, we have a look if the packed-refs
            # file exists. git doesn't use the FETCH_HEAD file when a branch
            # name is given!
            packed_refs_path = os.path.join(repo_root_path, '.git/packed-refs')
            if not os.path.exists(packed_refs_path):
                # bug fix: previously the code fell through and tried to read
                # the missing packed-refs file anyway, raising FileNotFoundError
                remote_sha1 = None
            else:
                # otherwise we read the packed-refs file and extract the hash
                packed_refs_content = read_file(packed_refs_path).decode('utf-8')
                findings = re.findall(
                    r'([ABCDEFabcdef0-9]*) refs\/remotes\/origin\/{}'.format(
                        source_branch), packed_refs_content)
                if len(findings) == 0:
                    remote_sha1 = None
                else:
                    remote_sha1 = findings[0]
        else:
            # if the file exists, we read the sha1 from it
            remote_sha1 = read_file(source_branch_head_path).decode('utf-8')

    if remote_sha1 is None:
        print('merge: {} - not something we can merge'.format(source_branch))
        exit(1)

    activeBranch = get_current_branch_name()
    local_sha1 = get_active_branch_hash()

    remote_sha1 = remote_sha1.strip()
    local_sha1 = local_sha1.strip()

    if remote_sha1 == local_sha1:
        # both branches point at the same commit: nothing to do
        return
    remote_commits = get_all_local_commits(remote_sha1)
    local_commits = get_all_local_commits(local_sha1)

    difference = set(local_commits) - set(remote_commits)

    if len(difference) == 0:
        # fast forward strategy: local history is a prefix of remote history
        path = os.path.join(repo_root_path,
                            '.git/refs/heads/{}'.format(activeBranch))
        write_file(path, "{}\n".format(remote_sha1).encode())
        obj_type, commit_data = read_object(remote_sha1.strip())
        # the tree hash sits in the first line: 'tree <40 hex chars>'
        tree_sha1 = commit_data.decode().splitlines()[0][5:45]
        unpack_object(tree_sha1, repo_root_path, repo_root_path)
        return

    # non fast forward strategy: find the first common ancestor
    intersection = set(local_commits).intersection(remote_commits)
    for commit_hash in remote_commits:
        if commit_hash in intersection:
            ancestor = commit_hash
            break

    # We need to find an ancestor and run 3-way merge on these files!
    # then we need to create a new tree and a commit object with 2 parents
    # NOTE(review): `ancestor` is unbound if the histories share no commit —
    # presumably unreachable for related branches; confirm against callers.

    obj_type, ancestor_commit = read_object(ancestor)
    obj_type, a_commit = read_object(local_commits[0])
    obj_type, b_commit = read_object(remote_commits[0])
    # list for the 3 branches
    ancestor_entries = []
    a_entries = []
    b_entries = []
    # here we get a list in the following format [(filename, sha1), (filename, sha2), ...]
    get_subtree_entries(ancestor_commit.splitlines()[0][5:45].decode(), '',
                        ancestor_entries)
    get_subtree_entries(a_commit.splitlines()[0][5:45].decode(), '', a_entries)
    get_subtree_entries(b_commit.splitlines()[0][5:45].decode(), '', b_entries)

    merge = {}
    # we go through each list and use the filename as key and create a list of hashes
    # slot order: [ancestor, local, remote]
    for e in ancestor_entries:
        if e[0] not in merge:
            merge[e[0]] = [e[1]]

    for e in a_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, e[1]]
        else:
            merge[e[0]].append(e[1])

    for e in b_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, None, e[1]]
        else:
            merge[e[0]].append(e[1])

    # if all hashes are the same, there is nothing we have to do
    # In case the second and third entry are not None, but the first one is: I am not sure if this case actually is possible
    conflict_files = []
    for f in merge:
        if len(merge[f]) == 2 and merge[f][0] != merge[f][1]:
            # if there are only two entries, the remote branch does not have the file and we will add it to the repository
            obj_type, data = read_object(merge[f][1])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(merge[f]) == 3 and merge[f][0] is None and merge[f][1] is None:
            # if there are three entries and the first two entries are none, the local repository does not have the file
            # so we add it. (bug fix: the length guard prevents an IndexError
            # for files that exist only in the ancestor)
            obj_type, data = read_object(merge[f][2])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(set(merge[f])) == 3:
            # all entries are different, so 3-way merge
            # read the content of each file
            obj_type, base_data = read_object(merge[f][0])
            obj_type, local_data = read_object(merge[f][1])
            obj_type, remote_data = read_object(merge[f][2])
            # do the 3-way merge
            had_conflict, merged_lines = three_way_merge(
                base_data.decode().splitlines(),
                local_data.decode().splitlines(),
                remote_data.decode().splitlines(), "HEAD", merge[f][2])
            # writing the merged lines into the file
            with open(os.path.join(repo_root_path, f), 'w') as file:
                for line in merged_lines:
                    file.write('{}\n'.format(line))
            if had_conflict:
                # adding file to list, so that we don't add it to the index
                conflict_files.append(f)
                path = os.path.join(repo_root_path, '.git/ORIG_HEAD')
                write_file(path, '{}\n'.format(local_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_HEAD')
                # bug fix: the previously referenced name `fetch_head` never
                # existed (NameError); the commit being merged is remote_sha1
                write_file(path, '{}\n'.format(remote_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_MODE')
                write_file(path, b'')
                path = os.path.join(repo_root_path, '.git/MERGE_MSG')
                if os.path.exists(path):
                    # append file name to conflict list
                    # bug fix: the handle used to be named `f`, shadowing the
                    # filename and writing the file-object repr instead
                    with open(path, 'a') as msg_file:
                        msg_file.write('# \t{}'.format(f))
                else:
                    repo_name = read_repo_name()
                    if not repo_name.startswith('location:'):
                        # Need to check if the return is handled by the calling function
                        print('.git/name file has an error. Exiting...')
                        return False
                    tmp = repo_name.split('location:')[1].split(':')
                    network = tmp[0].strip()
                    user_key = tmp[1].strip()
                    git_factory = get_factory_contract(network)

                    repository = git_factory.functions.getRepository(
                        user_key).call()
                    write_file(
                        path,
                        'Merge branch \'{}\' of {} into {}\n\n# Conflicts\n# \t{}\n'
                        .format(source_branch, repository[2], activeBranch,
                                f).encode())

    # adding all the files to the index. TODO: can be more efficient if we add it to the previous loop
    files_to_add = []
    pwd = os.getcwd()
    os.chdir(repo_root_path)
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git') and name not in conflict_files:
                files_to_add.append(os.path.join(path, name)[2:])
    os.chdir(pwd)
    add(files_to_add)
    # creating a commit object with two parents
    if not had_conflict:
        commit('Merging {} into {}'.format(source_branch, activeBranch),
               parent1=local_commits[0],
               parent2=remote_commits[0])
Пример #10
0
def pull():
    """
    Pull the remote state of the active branch into the local repository.

    Refuses to run when the working copy or index has uncommitted changes.
    Resolves the repository contract from .git/name, compares remote and
    local commit histories and, when the remote is strictly ahead, unpacks
    the missing commits, rewrites the branch ref, replaces the working tree
    with the newest remote commit and rebuilds the index.
    """
    print('Pulling')
    changed, _, _ = get_status_workspace()
    # we are checking if there a changed files in the working copy or files staged which have not been committed.
    # if one case is true, pull won't be executed
    if len(changed) > 0 or not is_stage_empty():
        print("You have local changes. Add and commit those first")
        return

    # .git/name stores 'location: <network>:<user key>' after remote creation
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]

    activeBranch = get_current_branch_name()

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    # branch tuple: branch[1] is the CID of the branch head
    branch = branch_contract.functions.getBranch(activeBranch).call()
    headCid = branch[1]

    remote_commits = get_all_remote_commits(headCid)

    #extract only the sha1 hash
    remote_commits_sha1 = [e['sha1'] for e in remote_commits]

    root_path = get_repo_root_path()
    local_commit = get_active_branch_hash()
    local_commits = get_all_local_commits(local_commit)

    # NOTE(review): assumes both commit lists are non-empty — confirm
    if local_commits[0] == remote_commits_sha1[0]:
        print('Already up to date')
        return

    remote_to_local_difference = set(remote_commits_sha1) - set(local_commits)
    local_to_remote_difference = set(local_commits) - set(remote_commits_sha1)

    if len(remote_to_local_difference
           ) == 0 and len(local_to_remote_difference) > 0:
        print('You are ahead of remote branch')
        return
    elif len(remote_to_local_difference) == 0 and len(
            local_to_remote_difference) == 0:
        print('Nothing to pull')
        return
    elif len(local_to_remote_difference) == 0:
        # alright, we filtered what needs to be downloaded and unpacked
        # check clone on how to do that!
        remote_commits = list(
            filter(lambda x: x['sha1'] in remote_to_local_difference,
                   remote_commits))
        repo_name = root_path.split('/')[-1]
        #unpack files from the newest commit
        first = True
        for commit in remote_commits:
            unpack_files_of_commit(root_path, commit, first)
            first = False
        # point the branch ref at the newest remote commit
        refs_path = os.path.join(root_path, '.git', 'refs', 'heads',
                                 activeBranch)
        write_file(refs_path, (remote_commits[0]['sha1'] + '\n').encode())

        # we are deleting all the files in the repo
        # there might be a better way, where we iterate over all of the files,
        # hash and compare the hashes. If there is no difference, leave as is, otherwise
        # overwrite. We would also need to check for files which are not in the index!
        # Maybe something at a later point in time :)
        # Same at checkout
        commit_entries = read_commit_entries(remote_commits[0]['sha1'])
        remove_files_from_repo()

        files_to_add = []

        # restore every file of the newest commit into the working tree
        for filename in commit_entries:
            object_type, data = read_object(commit_entries[filename])
            assert object_type == 'blob'
            write_file('{}/{}'.format(root_path, filename),
                       data.decode('utf-8'),
                       binary='')
            files_to_add.append(filename)

        # remove index file
        os.remove('{}/.git/index'.format(root_path))
        add(files_to_add)
Пример #11
0
def update_HEAD(pathToRepo, newBranchName):
    """Point .git/HEAD of the given repository at the given branch."""
    head_path = '{}/.git/HEAD'.format(pathToRepo)
    new_ref = 'ref: refs/heads/{}'.format(newBranchName)
    write_file(head_path, new_ref, binary='')
Пример #12
0
def commit(message: str,
           author: str = None,
           parent1: str = None,
           parent2: str = None) -> str:
    """
    Commit the current state of the index to the active branch with the
    given message. Returns the hash of the commit object.

    Parameters:
        message (str): The message for the commit.
        author (str): The author of the commit; read from the config file
            when omitted.
        parent1 (str): The first parent of the commit; defaults to the
            active branch head.
        parent2 (str): The second parent of the commit; overridden by the
            content of .git/MERGE_HEAD when that file exists.

    Returns:
        str: hash of the created commit object.
    """
    try:
        index = read_index()
        # we are working on write tree
        tree = hash_object(b''.join(write_tree(index)), 'tree')
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    if parent1 is None:
        # even though get_active_branch_hash throws a NoRepositoryError,
        # we don't have to catch it here: if there were no repository we
        # would not reach this code anyway.
        parent = get_active_branch_hash()
    else:
        parent = parent1

    # check if there is a MERGE_HEAD file. If there is, parent2 is set to the sha1 hash
    merge_head_path = os.path.join(get_repo_root_path(), '.git', 'MERGE_HEAD')

    if os.path.exists(merge_head_path):
        parent2 = read_file(merge_head_path).decode().strip()

    if author is None:
        # get_value_from_config_file throws a NoRepositoryError
        # but the same as above, we don't have to catch it
        user_name = get_value_from_config_file('name')
        user_email = get_value_from_config_file('email')

        author = '{} <{}>'.format(user_name, user_email)

    timestamp = int(time.mktime(time.localtime()))
    utc_offset = -time.timezone

    # bug fix: use '+' for a zero offset as well, so UTC renders as git's
    # '+0000' instead of '-0000'
    author_time = '{} {}{:02}{:02}'.format(timestamp,
                                           '+' if utc_offset >= 0 else '-',
                                           abs(utc_offset) // 3600,
                                           (abs(utc_offset) // 60) % 60)

    # assemble the textual commit object
    lines = ['tree ' + tree]
    if parent:
        lines.append('parent ' + parent)
    if parent2 is not None:
        lines.append('parent ' + parent2)
    lines.append('author {} {}'.format(author, author_time))
    lines.append('committer {} {}'.format(author, author_time))
    lines.append('')
    lines.append(message)
    lines.append('')
    data = '\n'.join(lines).encode()
    sha1 = hash_object(data, 'commit')

    repo_root_path = get_repo_root_path()
    activeBranch = get_current_branch_name()

    # advance the active branch ref to the new commit
    branch_path = os.path.join(repo_root_path, '.git', 'refs', 'heads',
                               activeBranch)
    write_file(branch_path, (sha1 + '\n').encode())

    # remove the merge files from the .git directory if committed
    if parent2 is not None and os.path.exists(merge_head_path):
        os.remove(merge_head_path)
        merge_mode_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MODE')
        os.remove(merge_mode_path)
        merge_msg_path = merge_head_path.replace('MERGE_HEAD', 'MERGE_MSG')
        os.remove(merge_msg_path)

    #TODO: git returns the number of files added and changed. Would be good too
    print('[{} {}] {}'.format(activeBranch, sha1[:7], message))
    print('Author: {}'.format(author))
    return sha1
Пример #13
0
def checkout(branch):
    """
    Switch the working tree to the given branch.

    Resolves the branch's commit hash from refs/heads, FETCH_HEAD or
    packed-refs, refuses to switch when the workspace or index has
    uncommitted changes, then rewrites the working tree, rebuilds the
    index and updates HEAD.

    Parameters:
        branch (str): name of the branch to check out.
    """
    if branch is None:
        print('fatal: Branch name not given.')
        exit(1)
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    active_branch = get_active_branch()

    if active_branch == branch:
        print('Already on \'{}\''.format(branch))
        exit(0)

    # boolean to see if the commit hash is taken from the packed-refs file
    from_packed_refs = False
    target_commit_hash = None

    # check if branch exists
    # first we check if .git/refs/heads/<branch> exists. If it does exist
    if os.path.isfile('{}/.git/refs/heads/{}'.format(repo_root_path, branch)):
        # we load the commit hash
        target_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
            repo_root_path, branch)).decode("utf-8").strip()
    else:
        # if it doesn't exist, we check if the FETCH_HEAD file exists
        if os.path.isfile('{}/.git/FETCH_HEAD'.format(repo_root_path)):
            fetch_head_content = read_file(
                '{}/.git/FETCH_HEAD'.format(repo_root_path)).decode('utf-8')
            target_commit_hash = fetch_head_content.split(
                'branch \'{}\''.format(branch))[0].split('\n')[-1].split(
                    '\t')[0].strip()
        # if it does not exist, we check if packed-refs exists
        elif os.path.isfile('{}/.git/packed-refs'.format(repo_root_path)):
            # in case it exists, we check if the branch exists in packed-refs
            packed_refs_content = read_file(
                '{}/.git/packed-refs'.format(repo_root_path)).decode("utf-8")
            if branch in packed_refs_content:
                # get the commit hash
                from_packed_refs = True
                target_commit_hash = packed_refs_content.split(
                    'refs/remotes/origin/{}\n'.format(branch))[0].split(
                        '\n')[-1].strip()
        else:
            # if does not exist, we exit
            print(
                'error: pathspec \'{}\' did not match any file(s) known to git3'
                .format(branch))
            exit(1)

    current_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
        repo_root_path, active_branch)).decode("utf-8").strip()

    # if the commit hash has been taken from the packed-refs, we need to write
    # the .git/refs/heads/<branch> file
    if from_packed_refs:
        print(
            'Branch \'{}\' set up to track remote branch \'{}\' from \'origin\'.'
            .format(branch, branch))
        write_file('{}/.git/refs/heads/{}'.format(repo_root_path, branch),
                   target_commit_hash,
                   binary='')

    if current_commit_hash == target_commit_hash:
        # switch branch when the hashes are the same.
        # we don't have to do anything else
        # (fix: dropped a stray, unused second format argument)
        write_file('{}/.git/HEAD'.format(repo_root_path),
                   'ref: refs/heads/{}'.format(branch),
                   binary='')
        exit(0)

    # refuse to overwrite uncommitted workspace changes
    # (fix: replaced `len(x) is not 0` identity comparisons, which rely on
    # CPython small-int interning and raise a SyntaxWarning, with truthiness)
    changed, new, deleted = get_status_workspace()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)

    # refuse when the index differs from the current commit
    changed, new, deleted = get_status_commit()
    if changed or new or deleted:
        print_checkout_error(changed, new, deleted)

    commit_entries = read_commit_entries(target_commit_hash)

    # we are deleting all the files in the repo
    # there might be a better way, where we iterate over all of the files,
    # hash and compare the hashes. If there is no difference, leave as is, otherwise
    # overwrite. We would also need to check for files which are not in the index!
    # Maybe something at a later point in time :)
    remove_files_from_repo()

    files_to_add = []

    # restore every file of the target commit into the working tree
    for filename in commit_entries:
        object_type, data = read_object(commit_entries[filename])
        assert object_type == 'blob'
        write_file('{}/{}'.format(repo_root_path, filename),
                   data.decode('utf-8'),
                   binary='')
        files_to_add.append(filename)

    # remove index file
    os.remove('{}/.git/index'.format(repo_root_path))
    add(files_to_add)
    update_HEAD(repo_root_path, branch)
    print('Switched to branch \'{}\''.format(branch))
Пример #14
0
def clone(repo_name):
    """
    Clone a remote repository onto the local machine.

    Resolves the repository on the given network, initializes a local
    repository, downloads all remote commits, unpacks the files of the
    default branch, and builds the index.

    Args:
        repo_name (str): Repository to clone, in the form
            '<network>:<user_address>/<repo_name>'.

    Returns:
        None. Prints an error message and returns early on failure.
    """
    user_address, repo_name = repo_name.split('/')
    network, user_address = user_address.split(':')

    if network not in ('mumbai', 'godwoken'):
        print(f"Network {network} not supported")
        return

    git_factory = get_factory_contract(network)
    user_key = git_factory.functions.getUserRepoNameHash(
        user_address, repo_name).call()
    user_key = '0x{}'.format(binascii.hexlify(user_key).decode())

    repository = git_factory.functions.getRepository(user_key).call()

    # repository is a tuple; presumably (exists, name, address) — the code
    # below relies on exactly those three slots
    if not repository[0] or repository[1] != repo_name:
        print('No such repository')
        return
    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branches = branch_contract.functions.getBranchNames().call()

    # string, which is going to be written into the .git/packed-refs file
    packed_refs_content = ""
    head_cids = set()

    main_cid = None

    # prefer 'main' as the default branch; otherwise fall back to the first
    default_branch_name = 'main' if 'main' in branches else branches[0]

    for branch_name in branches:
        # branch is a tuple; branch[1] is the head CID of the branch
        branch = branch_contract.functions.getBranch(branch_name).call()
        head_cids.add(branch[1])
        packed_refs_content += '{} refs/remotes/origin/{}\n'.format(
            branch[1], branch_name)
        if branch_name == default_branch_name:
            main_cid = branch[1]

    print('Cloning {:s}'.format(repo_name))
    # initialize repository; init() prints its own error when it fails
    if not init(repo_name):
        return

    # get all remote commits
    for head_cid in head_cids:
        commits = get_all_remote_commits(head_cid)

        # replacing cid with sha1 in the packed-refs content
        packed_refs_content = packed_refs_content.replace(
            head_cid, commits[0]['sha1'])

        # we are going to unpack only the files for the main branch. Commits
        # and all other git objects are still downloaded for every branch.
        if head_cid == main_cid:
            # write the branch head ref and point HEAD at the default branch
            main_ref_path = os.path.join(repo_name, '.git', 'refs', 'heads',
                                         default_branch_name)
            write_file(main_ref_path, (commits[0]['sha1'] + '\n').encode())
            head_ref_path = os.path.join(repo_name, '.git', 'HEAD')
            write_file(
                head_ref_path,
                ('ref: refs/heads/{}\n'.format(default_branch_name)).encode())
            first = True
        else:
            first = False

        for commit in commits:
            unpack_files_of_commit(repo_name, commit, first)
            first = False

    # changing into the repo, also for the add function, in order to find
    # the index file
    os.chdir(repo_name)

    # write packed-refs
    write_file('.git/packed-refs', packed_refs_content, binary='')

    write_file('.git/name', str.encode(f"location: {network}:{user_key}"))
    # collecting all files from the repo in order to create the index file
    files_to_add = []
    for path, subdirs, files in os.walk('.'):
        # prune .git in-place so os.walk never descends into it.
        # BUG FIX: the old check `path.startswith('./.git')` also wrongly
        # skipped sibling directories such as './.github'.
        subdirs[:] = [d for d in subdirs if not (path == '.' and d == '.git')]
        for name in files:
            # strip the leading './' so paths are repo-relative
            files_to_add.append(os.path.join(path, name)[2:])
    add(files_to_add)
    print('{:s} cloned'.format(repo_name))
Example #15
0
def fetch(branchName):
    """
    Download commits and objects from the remote repository.

    Writes remote heads to .git/refs/remotes/origin/<branch>, records the
    fetched heads in .git/FETCH_HEAD, and unpacks any remote commits that
    are missing locally.

    Args:
        branchName (str or None): Branch to fetch. If None, data is
            collected for all remote branches.

    Returns:
        False on error, otherwise None.
    """
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        # Need to check if the return is handled by the calling function
        print('.git/name file has an error. Exiting...')
        return False
    # .git/name content looks like 'location: <network>:<user_key>'
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    active_branch = get_current_branch_name()

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return

    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    # fetch_data will contain tuples in the following format
    # (branch_name, head_cid, head_commit_sha1 of branch)
    fetch_data = []

    # if branchName is None, the user called git3 fetch without arguments,
    # so we collect data from all branches
    if branchName is None:
        branches = branch_contract.functions.getBranchNames().call()
        for branch_name in branches:
            # returns tuple (bool, headcid)
            branch = branch_contract.functions.getBranch(branch_name).call()
            branch_commit_hash = get_branch_hash(branch_name)
            fetch_data.append((branch_name, branch[1], branch_commit_hash))
    else:
        # returns tuple (bool, headcid)
        branch = branch_contract.functions.getBranch(branchName).call()

        if not branch[1]:
            print('fatal: couldn\'t find remote ref {}'.format(branchName))
            return False

        # BUG FIX: the original referenced the undefined name `branch_name`
        # here, raising a NameError whenever a single branch was fetched
        branch_commit_hash = get_branch_hash(branchName)
        fetch_data.append((branchName, branch[1], branch_commit_hash))

    repo_root_path = get_repo_root_path()
    fetch_head_data = ''
    # get all remote commits
    for data in fetch_data:
        remote_commits = get_all_remote_commits(data[1])

        # extract only the sha1 hash
        remote_commits_sha1 = [e['sha1'] for e in remote_commits]

        local_commits = get_all_local_commits(data[2])

        # only the active branch's head is eligible for a later merge
        if data[0] != active_branch:
            not_for_merge = 'not-for-merge'
        else:
            not_for_merge = ''

        # preparing FETCH_HEAD file content
        fetch_head_data = '{}{}\t{}\t{}\'{}\' of {}\n'.format(
            fetch_head_data, remote_commits_sha1[0], not_for_merge, 'branch ',
            data[0], git_repo_address)

        # write the remote commit to the refs/remotes/origin/[branchName] file
        write_file(
            os.path.join(repo_root_path, '.git/refs/remotes/origin/', data[0]),
            '{}\n'.format(remote_commits_sha1[0]), '')

        # check if we have any local commits
        # if local_commits length is zero, there are no local commits for that
        # particular branch, so we need to download those!
        # if the first sha1 are equal, we don't need to download anything
        if len(local_commits) > 0 and local_commits[0] == remote_commits_sha1[0]:
            continue

        remote_to_local_difference = set(remote_commits_sha1) - set(
            local_commits)

        # transfer the data from ipfs into git objects on the local machine
        for commit_hash in remote_to_local_difference:
            for commit in remote_commits:
                if commit['sha1'] == commit_hash:
                    unpack_files_of_commit(repo_root_path, commit, False)

    path = os.path.join(repo_root_path, '.git', 'FETCH_HEAD')
    write_file(path, fetch_head_data, '')