Example #1
    def test_add_single_file(self, empty_objects_dir, delete_index_file):
        assert os.listdir(self.OBJECTS_DIR_PATH) == []

        add([self.file_names_and_content[0][0]])

        assert os.listdir(self.OBJECTS_DIR_PATH) == [self.file_hashes[0][:2]]
        assert os.listdir(os.path.join(self.OBJECTS_DIR_PATH, self.file_hashes[0][:2])) == [self.file_hashes[0][2:]]
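The assertions above follow git's object layout: a blob's SHA-1 is split into a 2-character directory name and a 38-character file name under .git/objects. A minimal sketch of that mapping (the payload b'hello' is an arbitrary example):

import hashlib
import os

data = b'hello'
# git hashes the object header plus the content
header = 'blob {}'.format(len(data)).encode()
sha1 = hashlib.sha1(header + b'\x00' + data).hexdigest()
# first two hex chars become the directory, the rest the file name
object_path = os.path.join('.git', 'objects', sha1[:2], sha1[2:])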
Example #2
def merge(source_branch):
    """
    Merges two branches. If the source_branch parameter is set, the source branch is merged into the current branch.
    If the parameter is not set, a merge from FETCH_HEAD is performed.
    """
    had_conflict = False
    repo_root_path = get_repo_root_path()
    # if no source branch for the merge is given, we go through the FETCH_HEAD file
    if source_branch is None:
        fetch_head_path = os.path.join(repo_root_path, '.git/FETCH_HEAD')
        if not os.path.exists(fetch_head_path):
            print('Nothing to merge. Have you called fetch before?')
            return
        fetch_head_content = read_file(fetch_head_path).decode('utf-8')

        findings = re.findall(r'^([ABCDEFabcdef0-9]+)\s+branch (\w|\')+',
                              fetch_head_content)
        if len(findings) == 0:
            remote_sha1 = None
        else:
            remote_sha1 = findings[0][0]
    else:
        # otherwise we are looking for the refs file first.
        source_branch_head_path = os.path.join(repo_root_path,
                                               '.git/refs/heads/',
                                               source_branch)
        if not os.path.exists(source_branch_head_path):
            # if the refs file does not exist, we check whether the packed-refs file exists
            # git doesn't use the FETCH_HEAD file when a branch name is given!
            packed_refs_path = os.path.join(repo_root_path, '.git/packed-refs')
            if not os.path.exists(packed_refs_path):
                # if not, the error message below is printed and we return
                remote_sha1 = None
            else:
                # otherwise we read the packed-refs file
                packed_refs_content = read_file(packed_refs_path).decode('utf-8')
                # and extract the commit hash
                findings = re.findall(
                    r'([ABCDEFabcdef0-9]*) refs/remotes/origin/{}'.format(
                        source_branch), packed_refs_content)
                if len(findings) == 0:
                    remote_sha1 = None
                else:
                    remote_sha1 = findings[0]
        else:
            # if the file exists, we read the sha1 from it
            remote_sha1 = read_file(source_branch_head_path).decode('utf-8')

    if remote_sha1 is None:
        print('merge: {} - not something we can merge'.format(source_branch))
        exit(1)

    activeBranch = get_current_branch_name()
    local_sha1 = get_active_branch_hash()

    remote_sha1 = remote_sha1.strip()
    local_sha1 = local_sha1.strip()

    if remote_sha1 == local_sha1:
        return
    remote_commits = get_all_local_commits(remote_sha1)
    local_commits = get_all_local_commits(local_sha1)

    difference = set(local_commits) - set(remote_commits)

    if len(difference) == 0:
        #fast forward strategy
        path = os.path.join(repo_root_path,
                            '.git/refs/heads/{}'.format(activeBranch))
        write_file(path, "{}\n".format(remote_sha1).encode())
        obj_type, commit_data = read_object(remote_sha1.strip())
        tree_sha1 = commit_data.decode().splitlines()[0][5:45]
        unpack_object(tree_sha1, repo_root_path, repo_root_path)
        return

    # non fast forward strategy
    intersection = set(local_commits).intersection(remote_commits)
    for commit_hash in remote_commits:
        if commit_hash in intersection:
            ancestor = commit_hash
            break

    # We need to find an ancestor and run a 3-way merge on these files!
    # Then we need to create a new tree and a commit object with 2 parents.

    obj_type, ancestor_commit = read_object(ancestor)
    obj_type, a_commit = read_object(local_commits[0])
    obj_type, b_commit = read_object(remote_commits[0])
    # list for the 3 branches
    ancestor_entries = []
    a_entries = []
    b_entries = []
    # here we get a list in the following format [(filename, sha1), (filename, sha2), ...]
    get_subtree_entries(ancestor_commit.splitlines()[0][5:45].decode(), '',
                        ancestor_entries)
    get_subtree_entries(a_commit.splitlines()[0][5:45].decode(), '', a_entries)
    get_subtree_entries(b_commit.splitlines()[0][5:45].decode(), '', b_entries)

    merge = {}
    # we go through each list, using the filename as key, and build a list of hashes
    for e in ancestor_entries:
        if e[0] not in merge:
            merge[e[0]] = [e[1]]

    for e in a_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, e[1]]
        else:
            merge[e[0]].append(e[1])

    for e in b_entries:
        if e[0] not in merge:
            merge[e[0]] = [None, None, e[1]]
        else:
            merge[e[0]].append(e[1])

    # if all hashes are the same, there is nothing we have to do
    # if the second and third entries are not None but the first one is,
    # it is unclear whether that case can actually occur
    conflict_files = []
    for f in merge:
        if len(merge[f]) == 2 and merge[f][0] != merge[f][1]:
            # if there are only two entries, the remote branch does not have the file and we will add it to the repository
            obj_type, data = read_object(merge[f][1])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(merge[f]) == 3 and merge[f][0] is None and merge[f][1] is None:
            # if there are three entries and the first two are None, the local
            # repository does not have the file, so we add it.
            obj_type, data = read_object(merge[f][2])
            path = os.path.join(repo_root_path, f)
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            write_file(path, data)
        elif len(set(merge[f])) == 3:
            # all entries are different, so 3-way merge
            # read the content of each file
            obj_type, base_data = read_object(merge[f][0])
            obj_type, local_data = read_object(merge[f][1])
            obj_type, remote_data = read_object(merge[f][2])
            # do the 3-way merge; track conflicts per file so a later clean
            # merge doesn't reset the overall flag
            file_had_conflict, merged_lines = three_way_merge(
                base_data.decode().splitlines(),
                local_data.decode().splitlines(),
                remote_data.decode().splitlines(), "HEAD", merge[f][2])
            had_conflict = had_conflict or file_had_conflict
            # writing the merged lines into the file
            with open(os.path.join(repo_root_path, f), 'w') as file:
                for line in merged_lines:
                    file.write('{}\n'.format(line))
            if file_had_conflict:
                # adding file to list, so that we don't add it to the index
                conflict_files.append(f)
                path = os.path.join(repo_root_path, '.git/ORIG_HEAD')
                write_file(path, '{}\n'.format(local_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_HEAD')
                # MERGE_HEAD holds the sha1 of the commit being merged in
                # (the original referenced an undefined fetch_head variable)
                write_file(path, '{}\n'.format(remote_sha1).encode())
                path = os.path.join(repo_root_path, '.git/MERGE_MODE')
                write_file(path, b'')
                path = os.path.join(repo_root_path, '.git/MERGE_MSG')
                if os.path.exists(path):
                    # append the conflicting file name to MERGE_MSG; use a
                    # separate handle name so the loop variable f isn't shadowed
                    with open(path, 'a') as msg_file:
                        msg_file.write('# \t{}\n'.format(f))
                else:
                    repo_name = read_repo_name()
                    if not repo_name.startswith('location:'):
                        # Need to check if the return is handled by the calling function
                        print('.git/name file has an error. Exiting...')
                        return False
                    tmp = repo_name.split('location:')[1].split(':')
                    network = tmp[0].strip()
                    user_key = tmp[1].strip()
                    git_factory = get_factory_contract(network)

                    repository = git_factory.functions.getRepository(
                        user_key).call()
                    write_file(
                        path,
                        'Merge branch \'{}\' of {} into {}\n\n# Conflicts\n# \t{}\n'
                        .format(source_branch, repository[2], activeBranch,
                                f).encode())

    # adding all the files to the index. TODO: can be more efficient if we add it to the previous loop
    files_to_add = []
    pwd = os.getcwd()
    os.chdir(repo_root_path)
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git') and name not in conflict_files:
                files_to_add.append(os.path.join(path, name)[2:])
    os.chdir(pwd)
    add(files_to_add)
    # creating a commit object with two parents
    if not had_conflict:
        commit('Merging {} into {}'.format(source_branch, activeBranch),
               parent1=local_commits[0],
               parent2=remote_commits[0])
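A toy illustration of the fast-forward test used in merge above: when every local commit is already contained in the remote history, the set difference is empty and the branch pointer can simply be moved forward. The hashes below are placeholders:

# local history is a strict prefix of the remote history
local_commits = ['c2', 'c1']
remote_commits = ['c4', 'c3', 'c2', 'c1']
difference = set(local_commits) - set(remote_commits)
assert len(difference) == 0  # fast-forward is possible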
Example #3
@pytest.fixture
def add_file_to_index():
    # presumably a pytest fixture; the decorator appears to have been lost
    # when the snippet was extracted
    add(['Readme.md'])
    yield
Example #4
def pull():
    print('Pulling')
    changed, _, _ = get_status_workspace()
    # check whether there are changed files in the working copy or staged files
    # which have not been committed. In either case, pull won't be executed
    if len(changed) > 0 or not is_stage_empty():
        print("You have local changes. Add and commit those first")
        return

    repo_name = read_repo_name()
    if not repo_name.startswith('location:'):
        print('.git/name file has an error. Exiting...')
        return False
    tmp = repo_name.split('location:')[1].split(':')
    network = tmp[0].strip()
    user_key = tmp[1].strip()

    git_factory = get_factory_contract(network)
    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0]:
        print('No such repository')
        return
    git_repo_address = repository[2]

    activeBranch = get_current_branch_name()

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branch = branch_contract.functions.getBranch(activeBranch).call()
    headCid = branch[1]

    remote_commits = get_all_remote_commits(headCid)

    #extract only the sha1 hash
    remote_commits_sha1 = [e['sha1'] for e in remote_commits]

    root_path = get_repo_root_path()
    local_commit = get_active_branch_hash()
    local_commits = get_all_local_commits(local_commit)

    if local_commits[0] == remote_commits_sha1[0]:
        print('Already up to date')
        return

    remote_to_local_difference = set(remote_commits_sha1) - set(local_commits)
    local_to_remote_difference = set(local_commits) - set(remote_commits_sha1)

    if len(remote_to_local_difference) == 0 and len(local_to_remote_difference) > 0:
        print('You are ahead of remote branch')
        return
    elif len(remote_to_local_difference) == 0 and len(local_to_remote_difference) == 0:
        print('Nothing to pull')
        return
    elif len(local_to_remote_difference) == 0:
        # alright, we filtered what needs to be downloaded and unpacked
        # check clone on how to do that!
        remote_commits = list(
            filter(lambda x: x['sha1'] in remote_to_local_difference,
                   remote_commits))
        repo_name = root_path.split('/')[-1]
        #unpack files from the newest commit
        first = True
        for commit in remote_commits:
            unpack_files_of_commit(root_path, commit, first)
            first = False
        refs_path = os.path.join(root_path, '.git', 'refs', 'heads',
                                 activeBranch)
        write_file(refs_path, (remote_commits[0]['sha1'] + '\n').encode())

        # we are deleting all the files in the repo
        # there might be a better way, where we iterate over all of the files,
        # hash and compare the hashes. If there is no difference, leave as is, otherwise
        # overwrite. We would also need to check for files which are not in the index!
        # Maybe something at a later point in time :)
        # Same at checkout
        commit_entries = read_commit_entries(remote_commits[0]['sha1'])
        remove_files_from_repo()

        files_to_add = []

        for filename in commit_entries:
            object_type, data = read_object(commit_entries[filename])
            assert object_type == 'blob'
            write_file('{}/{}'.format(root_path, filename),
                       data.decode('utf-8'),
                       binary='')
            files_to_add.append(filename)

        # remove index file
        os.remove('{}/.git/index'.format(root_path))
        add(files_to_add)
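get_all_local_commits is not shown in these examples. A plausible sketch, assuming commit objects carry their ancestry in 'parent <sha1>' lines (as in git's commit format) and walking only the first parent; read_object is the same helper used throughout:

def get_all_local_commits_sketch(commit_hash):
    # returns newest first, matching how the callers above index [0]
    commits = []
    while commit_hash is not None:
        commits.append(commit_hash)
        _, data = read_object(commit_hash)
        # 'parent ' is 7 chars, the sha1 is the following 40
        parents = [line[7:47] for line in data.decode().splitlines()
                   if line.startswith('parent ')]
        commit_hash = parents[0] if parents else None
    return commits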
Example #5
def main():
    parser = argparse.ArgumentParser()
    sub_parsers = parser.add_subparsers(dest='command', metavar='command')
    sub_parsers.required = True

    # Add
    sub_parser = sub_parsers.add_parser('add', help='add file(s) to index')
    sub_parser.add_argument('paths',
                            nargs='+',
                            metavar='path',
                            help='path(s) of files to add')

    # Branch
    sub_parser = sub_parsers.add_parser('branch',
                                        help='List and Create branches')
    sub_parser.add_argument('-r',
                            '--remotes',
                            action='store_true',
                            help='act on remote-tracking branches')
    sub_parser.add_argument(
        'branchname',
        metavar='<branchname>',
        nargs='?',
        help='Create a new branch named <branchname>')

    # Cat-file
    sub_parser = sub_parsers.add_parser('cat-file',
                                        help='display contents of object')
    valid_modes = ['commit', 'tree', 'blob', 'size', 'type', 'pretty']
    sub_parser.add_argument(
        'mode',
        choices=valid_modes,
        help='object type (commit, tree, blob) or display mode (size, '
        'type, pretty)')
    sub_parser.add_argument(
        'hash_prefix', help='SHA-1 hash (or hash prefix) of object to display')

    # Checkout
    sub_parser = sub_parsers.add_parser('checkout', help='Switch branches')
    sub_parser.add_argument('-b',
                            action='store_true',
                            help='Create a new branch with name new_branch')
    sub_parser.add_argument('branch',
                            metavar='<branch>',
                            help='Checkout to <branch>')

    # Commit
    sub_parser = sub_parsers.add_parser(
        'commit',
        help='commit current state of index to current active branch')
    sub_parser.add_argument(
        '-a',
        '--author',
        help='commit author in format "A U Thor <*****@*****.**>" '
        '(uses GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL environment '
        'variables by default)')
    sub_parser.add_argument('-m',
                            '--message',
                            required=True,
                            help='text of commit message')

    # Create
    sub_parser = sub_parsers.add_parser('create',
                                        help='create your remote repository')
    valid_networks = ['godwoken', 'mumbai']
    sub_parser.add_argument(
        '-n',
        '--network',
        required=True,
        choices=valid_networks,
        help='Choose which network to interact with. Godwoken Testnet and'
        ' Mumbai are currently supported.')

    # Clone
    sub_parser = sub_parsers.add_parser('clone',
                                        help='clone a remote repository')
    sub_parser.add_argument('name', help='name of repository to clone')

    # Diff
    sub_parser = sub_parsers.add_parser(
        'diff',
        help='show diff of files changed (between index and working '
        'copy)')
    sub_parser.add_argument(
        '--staged',
        action='store_true',
        help='This form is to view the changes you staged for the '
        'next commit relative to the HEAD commit.')

    # Hash-object
    sub_parser = sub_parsers.add_parser(
        'hash-object',
        help='hash contents of given path (and optionally write to '
        'object store)')
    sub_parser.add_argument('path', help='path of file to hash')
    sub_parser.add_argument('-t',
                            choices=['commit', 'tree', 'blob'],
                            default='blob',
                            dest='type',
                            help='type of object (default %(default)r)')
    sub_parser.add_argument(
        '-w',
        action='store_true',
        dest='write',
        help='write object to object store (as well as printing hash)')

    # init
    sub_parser = sub_parsers.add_parser('init', help='initialize a new repo')
    sub_parser.add_argument('repo',
                            nargs='?',
                            default='.',
                            help='directory name for new repo')

    #sub_parser = sub_parsers.add_parser('ls-files',
    #help='list files in index')
    #sub_parser.add_argument('-s', '--stage', action='store_true',
    #help='show object details (mode, hash, and stage number) in '
    #'addition to path')

    # Fetch
    sub_parser = sub_parsers.add_parser(
        'fetch', help='Download object and refs from another repository')
    sub_parser.add_argument('branch', nargs='?', help='branch data to fetch')

    # Get-Address
    sub_parser = sub_parsers.add_parser('get-address',
                                        help='Get Matic wallet address')

    # Merge
    sub_parser = sub_parsers.add_parser(
        'merge', help='Join two or more development histories together')
    sub_parser.add_argument('sourceBranch',
                            nargs='?',
                            help='branch to be merged into the current branch')

    # Push
    sub_parser = sub_parsers.add_parser(
        'push', help='push current active branch to given git server URL')
    #sub_parser.add_argument('git_url',
    #        help='URL of git repo, eg: https://github.com/benhoyt/pygit.git')
    #sub_parser.add_argument('-p', '--password',
    #help='password to use for authentication (uses GIT_PASSWORD '
    #'environment variable by default)')
    #sub_parser.add_argument('-u', '--username',
    #help='username to use for authentication (uses GIT_USERNAME '
    #'environment variable by default)')

    # Pull
    sub_parser = sub_parsers.add_parser('pull', help='pulls remote commits')

    # Status
    sub_parser = sub_parsers.add_parser('status',
                                        help='show status of working copy')

    args = parser.parse_args()
    if args.command == 'add':
        add(args.paths)
    elif args.command == 'branch':
        if args.branchname:
            createBranch(args.command, args.branchname)
        else:
            listBranches(args.remotes)
    elif args.command == 'checkout':
        if args.b is False:
            checkout(args.branch)
        else:
            createBranch('checkout', args.branch)
    elif args.command == 'cat-file':
        try:
            cat_file(args.mode, args.hash_prefix)
        except ValueError as error:
            print(error, file=sys.stderr)
            sys.exit(1)
    elif args.command == 'commit':
        commit(args.message, author=args.author)
    elif args.command == 'create':
        create(args.network)
    elif args.command == 'clone':
        clone(args.name)
    elif args.command == 'diff':
        diff(args.staged)
    elif args.command == 'fetch':
        fetch(args.branch)
    elif args.command == 'get-address':
        address = getAddress()
        print('Your address is: {}'.format(address))
    elif args.command == 'hash-object':
        hashObject(read_file(args.path), args.type, write=args.write)
    elif args.command == 'init':
        init(args.repo)
    elif args.command == 'ls-files':
        ls_files(details=args.stage)
    elif args.command == 'merge':
        merge(args.sourceBranch)
    elif args.command == 'push':
        push()
    elif args.command == 'pull':
        pull()
    elif args.command == 'status':
        status()
    else:
        assert False, 'unexpected command {!r}'.format(args.command)
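A quick smoke test of the parser wiring above, passing an explicit argv list to parse_args instead of reading the real command line (only the 'add' subparser is rebuilt here):

import argparse

parser = argparse.ArgumentParser()
sub_parsers = parser.add_subparsers(dest='command', metavar='command')
sub_parsers.required = True
sub_parser = sub_parsers.add_parser('add', help='add file(s) to index')
sub_parser.add_argument('paths', nargs='+', metavar='path')

args = parser.parse_args(['add', 'Readme.md', 'setup.py'])
assert args.command == 'add'
assert args.paths == ['Readme.md', 'setup.py']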
Example #6
def checkout(branch):
    if branch is None:
        print('fatal: Branch name not given.')
        exit(1)
    try:
        repo_root_path = get_repo_root_path()
    except NoRepositoryError as nre:
        print(nre)
        exit(1)

    active_branch = get_active_branch()

    if active_branch == branch:
        print('Already on \'{}\''.format(branch))
        exit(0)

    # boolean to see if the commit hash is taken from the packed-refs file
    from_packed_refs = False
    target_commit_hash = None

    # check if the branch exists
    # first we check if .git/refs/heads/<branch> exists. If it does exist,
    if os.path.isfile('{}/.git/refs/heads/{}'.format(repo_root_path, branch)):
        # we load the commit hash
        target_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
            repo_root_path, branch)).decode("utf-8").strip()
    else:
        # if it doesn't exist, we check if the FETCH_HEAD file exists
        if os.path.isfile('{}/.git/FETCH_HEAD'.format(repo_root_path)):
            fetch_head_content = read_file(
                '{}/.git/FETCH_HEAD'.format(repo_root_path)).decode('utf-8')
            target_commit_hash = fetch_head_content.split(
                'branch \'{}\''.format(branch))[0].split('\n')[-1].split(
                    '\t')[0].strip()
        # if it does not exist, we check if packed-refs exists
        elif os.path.isfile('{}/.git/packed-refs'.format(repo_root_path)):
            # in case it exists, we check if the branch exists in packed-refs
            packed_refs_content = read_file(
                '{}/.git/packed-refs'.format(repo_root_path)).decode("utf-8")
            if branch in packed_refs_content:
                # get the commit hash
                from_packed_refs = True
                target_commit_hash = packed_refs_content.split(
                    'refs/remotes/origin/{}\n'.format(branch))[0].split(
                        '\n')[-1].strip()
        else:
            # if neither file exists, we exit
            print(
                'error: pathspec \'{}\' did not match any file(s) known to git3'
                .format(branch))
            exit(1)

    if target_commit_hash is None:
        # the branch was not found in FETCH_HEAD or packed-refs either
        print('error: pathspec \'{}\' did not match any file(s) known to git3'.
              format(branch))
        exit(1)

    current_commit_hash = read_file('{}/.git/refs/heads/{}'.format(
        repo_root_path, active_branch)).decode("utf-8").strip()

    # if the commit hash has been taken from the packed-refs, we need to write
    # the .git/refs/heads/<branch> file
    if from_packed_refs:
        print(
            'Branch \'{}\' set up to track remote branch \'{}\' from \'origin\'.'
            .format(branch, branch))
        write_file('{}/.git/refs/heads/{}'.format(repo_root_path, branch),
                   target_commit_hash,
                   binary='')

    if current_commit_hash == target_commit_hash:
        # switch branch when the hashes are the same.
        # we don't have to do anything else
        write_file('{}/.git/HEAD'.format(repo_root_path),
                   'ref: refs/heads/{}'.format(branch),
                   binary='')
        exit(0)

    changed, new, deleted = get_status_workspace()
    if len(changed) != 0 or len(new) != 0 or len(deleted) != 0:
        print_checkout_error(changed, new, deleted)

    changed, new, deleted = get_status_commit()
    if len(changed) != 0 or len(new) != 0 or len(deleted) != 0:
        print_checkout_error(changed, new, deleted)

    commit_entries = read_commit_entries(target_commit_hash)

    # we are deleting all the files in the repo
    # there might be a better way, where we iterate over all of the files,
    # hash and compare the hashes. If there is no difference, leave as is, otherwise
    # overwrite. We would also need to check for files which are not in the index!
    # Maybe something at a later point in time :)
    remove_files_from_repo()

    files_to_add = []

    for filename in commit_entries:
        object_type, data = read_object(commit_entries[filename])
        assert object_type == 'blob'
        write_file('{}/{}'.format(repo_root_path, filename),
                   data.decode('utf-8'),
                   binary='')
        files_to_add.append(filename)

    # remove index file
    os.remove('{}/.git/index'.format(repo_root_path))
    add(files_to_add)
    update_HEAD(repo_root_path, branch)
    print('Switched to branch \'{}\''.format(branch))
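For reference, the packed-refs lookup in checkout assumes git's plain-text format of one '<sha1> <refname>' pair per line. A small self-contained check of that parsing, with made-up hashes:

packed_refs_content = (
    'f30ab0f4c23bb129a26ac2925dba6e3d58dcc201 refs/remotes/origin/main\n'
    '0c1d2e3f4a5b6c7d8e9f0a1b2c3d4e5f60718293 refs/remotes/origin/dev\n'
)
branch = 'dev'
# everything before the matching refname, last line, is the commit hash
target_commit_hash = packed_refs_content.split(
    'refs/remotes/origin/{}\n'.format(branch))[0].split('\n')[-1].strip()
assert target_commit_hash == '0c1d2e3f4a5b6c7d8e9f0a1b2c3d4e5f60718293'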
Example #7
    def test_adding_non_existing_file(self, empty_objects_dir):
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            add([self.non_existing_file])
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 1
Example #8
    def test_adding_in_non_repository_dir(self, move_to_root_and_back):
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            add(self.file_names_and_content)
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 1
Example #9
def clone(repo_name):
    """
    Cloning a remote repository on the local machine.

    repo_name: Repository to be cloned
    """

    user_address, repo_name = repo_name.split('/')
    network, user_address = user_address.split(':')

    if network != 'mumbai' and network != 'godwoken':
        print(f"Network {network} not supported")
        return

    git_factory = get_factory_contract(network)
    user_key = git_factory.functions.getUserRepoNameHash(
        user_address, repo_name).call()
    user_key = '0x{}'.format(binascii.hexlify(user_key).decode())

    repository = git_factory.functions.getRepository(user_key).call()

    if not repository[0] or repository[1] != repo_name:
        print('No such repository')
        return
    git_repo_address = repository[2]

    branch_contract = get_facet_contract("GitBranch", git_repo_address,
                                         network)

    branches = branch_contract.functions.getBranchNames().call()

    # string, which is going to be written into the .git/packed-refs file
    packed_refs_content = ""
    head_cids = set()

    main_cid = None

    default_branch_name = 'main' if 'main' in branches else branches[0]

    for branch_name in branches:
        branch = branch_contract.functions.getBranch(branch_name).call()
        head_cids.add(branch[1])
        packed_refs_content += '{} refs/remotes/origin/{}\n'.format(
            branch[1], branch_name)
        if branch_name == default_branch_name:
            main_cid = branch[1]

    print('Cloning {:s}'.format(repo_name))
    # initialize repository
    if not init(repo_name):
        return

    # get all remote commits
    for head_cid in head_cids:
        commits = get_all_remote_commits(head_cid)

        # replacing cid with sha1
        packed_refs_content = packed_refs_content.replace(
            head_cid, commits[0]['sha1'])

        # we are going to unpack only the files for the main branch. Commits and
        # all other git objects should still be downloaded
        if head_cid == main_cid:
            # write to refs
            main_ref_path = os.path.join(repo_name, '.git', 'refs', 'heads',
                                         default_branch_name)
            write_file(main_ref_path, (commits[0]['sha1'] + '\n').encode())
            head_ref_path = os.path.join(repo_name, '.git', 'HEAD')
            write_file(
                head_ref_path,
                ('ref: refs/heads/{}\n'.format(default_branch_name)).encode())
            first = True
        else:
            first = False

        for commit in commits:
            unpack_files_of_commit(repo_name, commit, first)
            first = False

    # changing into the repo, also needed by the add function in order to find the index file
    os.chdir(repo_name)

    # write packed-refs
    write_file('.git/packed-refs', packed_refs_content, binary='')

    write_file('.git/name', str.encode(f"location: {network}:{user_key}"))
    # collecting all files from the repo in order to create the index file
    files_to_add = []
    for path, subdirs, files in os.walk('.'):
        for name in files:
            # we don't want to add the files under .git to the index
            if not path.startswith('./.git'):
                files_to_add.append(os.path.join(path, name)[2:])
    add(files_to_add)
    print('{:s} cloned'.format(repo_name))
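Hypothetical usage, matching the split('/') and split(':') parsing at the top of clone; the network is one of the supported values and the address is made up:

clone('mumbai:0x1234567890abcdef1234567890abcdef12345678/my-project')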