Example #1
def get_remote_branch_hash(branchName):
    """
    Get commit hash of remote branch branchName, return CID or None if no remote commits.
    """
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        return None
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]
    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branch = branch_contract.functions.getBranch(branchName).call()

    # check if the branch is active
    if not branch[0]:
        return None
    # if active, return head cid
    return branch[1]
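
A minimal usage sketch, assuming the current directory is a git3 repository whose .git/name file already holds a 'location:' entry; the branch name is just an example:

head_cid = get_remote_branch_hash('main')
if head_cid is None:
    print('Branch is inactive or has no remote commits yet')
else:
    print('Remote head CID:', head_cid)
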
Example #2
def create(network):
    git_factory = get_factory_contract(network)
    repo_name = read_repo_name()
    if not repo_name.startswith('name:'):
        print(
            'The string in file .git/name is not correct. Exiting creation of remote'
        )
        return
    repo_name = repo_name.split('name:')[1].strip()

    w3 = get_web3_provider(network)

    if repo_name == '':
        print('There is no repository name.')
        return
    # #TODO: before creating tx and so on, check if this kind of repo exists already :)
    user_address = get_user_dlt_address()

    nonce = w3.eth.get_transaction_count(user_address)

    print('User address', user_address)
    gas_price = get_current_gas_price(network)

    print('Preparing transaction to create repository {}'.format(repo_name))
    create_repo_tx = git_factory.functions.createRepository(
        repo_name).buildTransaction({
            'chainId': get_chain_id(network),
            'gas': 3947750,
            'gasPrice': w3.toWei(gas_price, 'gwei'),
            'nonce': nonce,
        })

    priv_key = bytes.fromhex(get_private_key())
    print('Signing transaction')
    signed_txn = w3.eth.account.sign_transaction(create_repo_tx,
                                                 private_key=priv_key)

    print('Sending transaction')
    tx_hash = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)

    # #TODO: print a clickable link to a block explorer
    print('Transaction hash {}'.format(
        binascii.hexlify(receipt['transactionHash']).decode()))
    if receipt['status']:
        print('Repository {:s} has been created'.format(repo_name))
        # replace the entry in the .git/name file with location: <hash>
        user_key = git_factory.functions.getUserRepoNameHash(
            user_address, repo_name).call()
        user_key = '{}:0x{}'.format(network,
                                    binascii.hexlify(user_key).decode())
        #TODO: in case we are within a subdir of the repo, this is going to fail!
        write_file(os.path.join('.git', 'name'),
                   str.encode('location: ' + user_key))
    else:
        print('Creating {:s} repository failed'.format(repo_name))
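
A hedged usage sketch: before the call, .git/name is expected to contain a 'name:' entry; on success the function rewrites it to a 'location:' entry. The network name and file contents below are illustrative:

# Assumes .git/name contains e.g. b'name: my-repo' and a funded account/private key is configured.
create('mumbai')
# On success .git/name now holds something like b'location: mumbai:0x<hash>'.
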
Example #3
def push_new_cid(branchName, cid):
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    user_address = get_user_dlt_address()

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return False
    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)
    w3 = get_web3_provider(network)

    nonce = w3.eth.get_transaction_count(user_address)

    gas_price = get_current_gas_price(network)

    create_push_tx = branch_contract.functions.push(branchName, cid).buildTransaction({
        'chainId': get_chain_id(network),
        'gas': 746427,
        'gasPrice': w3.toWei(gas_price, 'gwei'),
        'nonce': nonce,
    })
    priv_key = bytes.fromhex(get_private_key())
    print('Signing transaction')
    signed_txn = w3.eth.account.sign_transaction(create_push_tx,
                                                 private_key=priv_key)
    tx_hash = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
    receipt = w3.eth.waitForTransactionReceipt(tx_hash)
    print('Transaction hash {}'.format(
        binascii.hexlify(receipt['transactionHash']).decode()))
    if receipt['status']:
        print('Successfully pushed')
    else:
        print('Pushing failed')
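
A minimal sketch of how push_new_cid might be combined with get_remote_branch_hash from Example #1; the branch name and CID value are placeholders:

new_cid = 'QmExampleCid'  # placeholder CID produced by an earlier IPFS upload
if get_remote_branch_hash('main') != new_cid:
    push_new_cid('main', new_cid)
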
Example #4
def check_if_repo_created():
    """
    Checks whether the repository has already been registered in the gitFactory contract.
    Returns False if it hasn't, otherwise True.
    """
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    w3 = get_web3_provider(network)
    if not w3.isConnected():
        #TODO: Throw an exception
        print('No connection. Establish a connection first')
        return False

    git_factory = get_factory_contract(network)

    return git_factory.functions.getRepository(user_key).call()[0]
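
A short sketch of how the check might gate repository creation; the wiring and the network name are assumptions, not taken from the original module:

if not check_if_repo_created():
    # the repository has not been registered on-chain yet, so register it first
    create('mumbai')
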
Example #5
def merge(source_branch):
    """
    Merges two branches. If the source_branch parameter is set, the source branch is merged into the current branch.
    If the parameter is not set, a merge from FETCH_HEAD is performed.
    """
    had_conflict = False
    repo_root_path = get_repo_root_path()
    # if no source branch for the merge is given, we go through the FETCH_HEAD file
    if source_branch is None:
        fetch_head_path = os.path.join(repo_root_path, '.git/FETCH_HEAD')
        if not os.path.exists(fetch_head_path):
            print('Nothing to merge. Have you called fetch before?')
            return
        fetch_head_content = read_file(fetch_head_path).decode('utf-8')

        findings = re.findall(r'^([ABCDEFabcdef0-9]+)\s+branch (\w|\')+',
                              fetch_head_content)
        if len(findings) == 0:
            remote_sha1 = None
        else:
            remote_sha1 = findings[0][0]
    else:
        # otherwise we are looking for the refs file first.
        source_branch_head_path = os.path.join(repo_root_path,
                                               '.git/refs/heads/',
                                               source_branch)
        if not os.path.exists(source_branch_head_path):
            # if the refs file does not exist, we check whether the packed-refs file exists
            # git doesn't use the FETCH_HEAD file when a branch name is given!
            packed_refs_path = os.path.join(repo_root_path, '.git/packed-refs')
            if not os.path.exists(packed_refs_path):
                # if not, there is nothing we can merge from
                remote_sha1 = None
            else:
                # otherwise we read the packed-refs file
                packed_refs_content = read_file(packed_refs_path).decode('utf-8')
                # and extract the commit hash
                findings = re.findall(
                    r'([ABCDEFabcdef0-9]*) refs/remotes/origin/{}'.format(
                        source_branch), packed_refs_content)
                if len(findings) == 0:
                    remote_sha1 = None
                else:
                    remote_sha1 = findings[0]
        else:
            # if the file exists, we read the sha1 from it
            remote_sha1 = read_file(source_branch_head_path).decode('utf-8')

    if remote_sha1 is None:
        print('merge: {} - not something we can merge'.format(source_branch))
        exit(1)

    activeBranch = get_current_branch_name()
    local_sha1 = get_active_branch_hash()

    remote_sha1 = remote_sha1.strip()
    local_sha1 = local_sha1.strip()

    if remote_sha1 == local_sha1:
        return
    remote_commits = get_all_local_commits(remote_sha1)
    local_commits = get_all_local_commits(local_sha1)

    difference = set(local_commits) - set(remote_commits)

    if len(difference) == 0:
        #fast forward strategy
        path = os.path.join(repo_root_path,
                            '.git/refs/heads/{}'.format(activeBranch))
        write_file(path, "{}\n".format(remote_sha1).encode())
        obj_type, commit_data = read_object(remote_sha1.strip())
        tree_sha1 = commit_data.decode().splitlines()[0][5:45]
        unpack_object(tree_sha1, repo_root_path, repo_root_path)
        return

    # non fast forward strategy
    intersection = set(local_commits).intersection(remote_commits)
    for commit_hash in remote_commits:
        if commit_hash in intersection:
            ancestor = commit_hash
            break

    # We need to find an ancestor and run a 3-way merge on these files!
    # Then we need to create a new tree and a commit object with 2 parents

    obj_type, ancestor_commit = read_object(ancestor)
    obj_type, a_commit = read_object(local_commits[0])
    obj_type, b_commit = read_object(remote_commits[0])
    # list for the 3 branches
    ancestor_entries = []
    a_entries = []
    b_entries = []
    # here we get a list in the following format [(filename, sha1), (filename, sha2), ...]
    get_subtree_entries(ancestor_commit.splitlines()[0][5:45].decode(), '',
                        ancestor_entries)
    get_subtree_entries(a_commit.splitlines()[0][5:45].decode(), '', a_entries)
    get_subtree_entries(b_commit.splitlines()[0][5:45].decode(), '', b_entries)

    merge = {}
    # we go through each list and use the filename as key to build a list of hashes
    for e in ancestor_entries:
        if e[0] not in merge:
            merge[e[0]] = [e[1]]

    for e in a_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, e[1]]
        else:
            merge[e[0]].append(e[1])

    for e in b_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, None, e[1]]
        else:
            merge[e[0]].append(e[1])

    # if all hashes are the same, there is nothing we have to do
    # In case the second and third entry are not None, but the first one is: I am not sure if this case actually is possible
    conflict_files = []
    for f in merge:
        if len(merge[f]) == 2 and merge[f][0] != merge[f][1]:
            # if there are only two entries, the remote branch does not have the file and we will add it to the repository
            obj_type, data = read_object(merge[f][1])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif merge[f][0] is None and merge[f][1] is None:
            # if there are three entries and the first two entries are none, the local repository does not have the file
            # so we add it.
            obj_type, data = read_object(merge[f][2])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(set(merge[f])) == 3:
            # all entries are different, so 3-way merge
            # read the content of each file
            obj_type, base_data = read_object(merge[f][0])
            obj_type, local_data = read_object(merge[f][1])
            obj_type, remote_data = read_object(merge[f][2])
            # do the 3-way merge
            file_had_conflict, merged_lines = three_way_merge(
                base_data.decode().splitlines(),
                local_data.decode().splitlines(),
                remote_data.decode().splitlines(), "HEAD", merge[f][2])
            # remember whether any file had a conflict, not just the last one merged
            had_conflict = had_conflict or file_had_conflict
            # writing the merged lines into the file
            with open(os.path.join(repo_root_path, f), 'w') as file:
                for line in merged_lines:
                    file.write('{}\n'.format(line))
            if file_had_conflict:
                # adding the file to the list, so that we don't add it to the index
                conflict_files.append(f)
                path = os.path.join(repo_root_path, '.git/ORIG_HEAD')
                write_file(path, '{}\n'.format(local_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_HEAD')
                write_file(path, '{}\n'.format(remote_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_MODE')
                write_file(path, b'')
                path = os.path.join(repo_root_path, '.git/MERGE_MSG')
                if os.path.exists(path):
                    # append the conflicting file name to the existing merge message
                    with open(path, 'a') as merge_msg:
                        merge_msg.write('# \t{}\n'.format(f))
                else:
                    repo_name = read_repo_name()
                    if not repo_name.startswith('location:'):
                        # Need to check if the return is handled by the calling function
                        print('.git/name file has an error. Exiting...')
                        return False
                    tmp = repo_name.split('location:')[1].split(':')
                    network = tmp[0].strip()
                    user_key = tmp[1].strip()
                    git_factory = get_factory_contract(network)

                    repository = git_factory.functions.getRepository(
                        user_key).call()
                    write_file(
                        path,
                        'Merge branch \'{}\' of {} into {}\n\n# Conflicts\n# \t{}\n'
                        .format(source_branch, repository[2], activeBranch,
                                f).encode())

    # adding all the files to the index. TODO: can be more efficient if we add it to the previous loop
    files_to_add = []
    pwd = os.getcwd()
    os.chdir(repo_root_path)
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git') and name not in conflict_files:
                files_to_add.append(os.path.join(path, name)[2:])
    os.chdir(pwd)
    add(files_to_add)
    # creating a commit object with two parents
    if not had_conflict:
        commit('Merging {} into {}'.format(source_branch, activeBranch),
               parent1=local_commits[0],
               parent2=remote_commits[0])
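
The three_way_merge helper used above is not shown in these examples; the following is a deliberately coarse sketch of the assumed interface (returning a conflict flag plus the merged lines, with the two labels used for conflict markers). It merges at whole-file granularity rather than hunk by hunk:

def three_way_merge(base_lines, local_lines, remote_lines, local_label, remote_label):
    """Sketch: merge two lists of lines against a common base version."""
    # trivial cases: only one side changed, or both sides made the same change
    if local_lines == base_lines or local_lines == remote_lines:
        return False, remote_lines
    if remote_lines == base_lines:
        return False, local_lines
    # both sides diverged from the base: emit one conflict block for the whole file
    merged = ['<<<<<<< {}'.format(local_label)]
    merged += local_lines
    merged.append('=======')
    merged += remote_lines
    merged.append('>>>>>>> {}'.format(remote_label))
    return True, merged
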
Example #6
def pull():
    print('Pulling')
    changed, _, _ = get_status_workspace()
    # we check whether there are changed files in the working copy or staged files
    # which have not been committed. If either is the case, pull won't be executed
    if len(changed) > 0 or not is_stage_empty():
        print("You have local changes. Add and commit those first")
        return

    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]

    activeBranch = get_current_branch_name()

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branch = branch_contract.functions.getBranch(activeBranch).call()
    headCid = branch[1]

    remote_commits = get_all_remote_commits(headCid)

    #extract only the sha1 hash
    remote_commits_sha1 = [e['sha1'] for e in remote_commits]

    root_path = get_repo_root_path()
    local_commit = get_active_branch_hash()
    local_commits = get_all_local_commits(local_commit)

    if local_commits[0] == remote_commits_sha1[0]:
        print('Already up to date')
        return

    remote_to_local_difference = set(remote_commits_sha1) - set(local_commits)
    local_to_remote_difference = set(local_commits) - set(remote_commits_sha1)

    if len(remote_to_local_difference) == 0 and len(local_to_remote_difference) > 0:
        print('You are ahead of remote branch')
        return
    elif len(remote_to_local_difference) == 0 and len(local_to_remote_difference) == 0:
        print('Nothing to pull')
        return
    elif len(local_to_remote_difference) == 0:
        # alright, we filtered what needs to be downloaded and unpacked
        # check clone on how to do that!
        remote_commits = list(
            filter(lambda x: x['sha1'] in remote_to_local_difference,
                   remote_commits))
        repo_name = root_path.split('/')[-1]
        #unpack files from the newest commit
        first = True
        for commit in remote_commits:
            unpack_files_of_commit(root_path, commit, first)
            first = False
        refs_path = os.path.join(root_path, '.git', 'refs', 'heads',
                                 activeBranch)
        write_file(refs_path, (remote_commits[0]['sha1'] + '\n').encode())

        # we are deleting all the files in the repo
        # there might be a better way, where we iterate over all of the files,
        # hash and compare the hashes. If there is no difference, leave as is, otherwise
        # overwrite. We would also need to check for files which are not in the index!
        # Maybe something at a later point in time :)
        # Same at checkout
        commit_entries = read_commit_entries(remote_commits[0]['sha1'])
        remove_files_from_repo()

        files_to_add = []

        for filename in commit_entries:
            object_type, data = read_object(commit_entries[filename])
            assert object_type == 'blob'
            write_file('{}/{}'.format(root_path, filename),
                       data.decode('utf-8'),
                       binary='')
            files_to_add.append(filename)

        # remove index file
        os.remove('{}/.git/index'.format(root_path))
        add(files_to_add)
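
A usage note as a sketch: pull takes no arguments and only fast-forwards the active branch; if local and remote histories have diverged, the function above falls through without merging:

# Assumes a clean working tree and a .git/name file with a 'location:' entry.
pull()
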
Example #7
def clone(repo_name):
    """
    Cloning a remote repository on the local machine.

    repo_name: Repository to be cloned
    """

    user_address, repo_name = repo_name.split('/')
    network, user_address = user_address.split(':')

    if network != 'mumbai' and network != 'godwoken':
        print(f"Network {network} not supported")
        return

    git_factory = get_factory_contract(network)
    user_key = git_factory.functions.getUserRepoNameHash(
        user_address, repo_name).call()
    user_key = '0x{}'.format(binascii.hexlify(user_key).decode())

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0] or repository[1] != repo_name:
        print('No such repository')
        return
    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branches = branch_contract.functions.getBranchNames().call()

    # string, which is going to be written into the .git/packed-refs file
    packed_refs_content = ""
    head_cids = set()

    main_cid = None

    default_branch_name = 'main' if 'main' in branches else branches[0]

    for branch_name in branches:
        branch = branch_contract.functions.getBranch(branch_name).call()
        head_cids.add(branch[1])
        packed_refs_content += '{} refs/remotes/origin/{}\n'.format(
            branch[1], branch_name)
        if branch_name == default_branch_name:
            main_cid = branch[1]

    print('Cloning {:s}'.format(repo_name))
    # initialize repository
    if not init(repo_name):
        return

    # get all remote commits
    for head_cid in head_cids:
        commits = get_all_remote_commits(head_cid)

        # replacing cid with sha1
        packed_refs_content = packed_refs_content.replace(
            head_cid, commits[0]['sha1'])

        # we are going to unpack only the files for the main branch. Commits and all
        # other git objects should still be downloaded
        if head_cid == main_cid:
            # write to refs
            main_ref_path = os.path.join(repo_name, '.git', 'refs', 'heads',
                                         default_branch_name)
            write_file(main_ref_path, (commits[0]['sha1'] + '\n').encode())
            head_ref_path = os.path.join(repo_name, '.git', 'HEAD')
            write_file(
                head_ref_path,
                ('ref: refs/heads/{}\n'.format(default_branch_name)).encode())
            first = True
        else:
            first = False

        for commit in commits:
            unpack_files_of_commit(repo_name, commit, first)
            first = False

    # changing into the repo, also for the add function, in order to find the index file
    os.chdir(repo_name)

    # write packed-refs
    write_file('.git/packed-refs', packed_refs_content, binary='')

    write_file('.git/name', str.encode(f"location: {network}:{user_key}"))
    # collecting all files from the repo in order to create the index file
    files_to_add = []
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git'):
                files_to_add.append(os.path.join(path, name)[2:])
    add(files_to_add)
    print('{:s} cloned'.format(repo_name))
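
A usage sketch; the argument format network:user_address/repo_name follows the parsing at the top of clone, with a placeholder address and repository name:

clone('mumbai:0x1234567890AbCdEf1234567890aBcDeF12345678/my-repo')
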
Example #8
def fetch(branchName):
    """
    Downloads commits and objects from the remote repository
    """
    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        # Need to check if the return is handled by the calling function
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    active_branch = get_current_branch_name()

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return

    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    # fetch_data will contain tuples in the following format
    # (branch_name, head_cid, head_commit_sha1 of branch)
    fetch_data = []

    # if branchName is None, the user called git3 fetch
    # so we collect data from all branches
    if branchName is None:
        branches = branch_contract.functions.getBranchNames().call()
        for branch_name in branches:
            # returns tuple (bool, headcid)
            branch = branch_contract.functions.getBranch(branch_name).call()
            branch_commit_hash = get_branch_hash(branch_name)
            fetch_data.append((branch_name, branch[1], branch_commit_hash))
    else:
        # returns tuple (bool, headcid)
        branch = branch_contract.functions.getBranch(branchName).call()

        if not branch[1]:
            print('fatal: couldn\'t find remote ref {}'.format(branchName))
            return False

        branch_commit_hash = get_branch_hash(branchName)
        fetch_data.append((branchName, branch[1], branch_commit_hash))

    repo_root_path = get_repo_root_path()
    fetch_head_data = ''
    # get all remote commits
    for data in fetch_data:
        remote_commits = get_all_remote_commits(data[1])

        #extract only the sha1 hash
        remote_commits_sha1 = [e['sha1'] for e in remote_commits]

        local_commits = get_all_local_commits(data[2])

        if data[0] != active_branch:
            not_for_merge = 'not-for-merge'
        else:
            not_for_merge = ''

        # preparing FETCH_HEAD file content
        fetch_head_data = '{}{}\t{}\tbranch \'{}\' of {}\n'.format(
            fetch_head_data, remote_commits_sha1[0], not_for_merge, data[0],
            git_repo_address)

        # write the remote commit to the refs/remotes/origin/[branchName] file
        write_file(
            os.path.join(repo_root_path, '.git/refs/remotes/origin/', data[0]),
            '{}\n'.format(remote_commits_sha1[0]), '')

        # check if we have any local commits
        # if local_commits length is zero, there are no local commits for that particular branch
        # so we need to download those!
        # if the first sha1 are equal, we don't need to download anything
        if len(local_commits) > 0 and local_commits[0] == remote_commits_sha1[0]:
            continue

        remote_to_local_difference = set(remote_commits_sha1) - set(
            local_commits)

        # transfer the data from ipfs into git objects on the local machine
        for commit_hash in remote_to_local_difference:
            for commit in remote_commits:
                if commit['sha1'] == commit_hash:
                    unpack_files_of_commit(repo_root_path, commit, False)

    path = os.path.join(repo_root_path, '.git', 'FETCH_HEAD')
    write_file(path, fetch_head_data, '')
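
A sketch of how fetch and the merge function from Example #5 might be combined into a pull-like flow; whether the caller should check fetch's return value is an assumption:

# Fetch all remote branches and update FETCH_HEAD ...
fetch(None)
# ... then merge the fetched head of the active branch (Example #5 reads FETCH_HEAD
# when no source branch is given).
merge(None)
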