Example #1
    def commit(self, commit_message, commit_by, commit_datetime=None) -> str:
        if not self.have_active_commit(): raise Exception('No active commit')

        current_changes = json.loads(
            sfs.file_get_contents(
                sfs.cpjoin(self.base_path, 'active_commit_changes')))
        active_files = json.loads(
            sfs.file_get_contents(
                sfs.cpjoin(self.base_path, 'active_commit_files')))

        if current_changes == []: raise Exception('Empty commit')

        # Create and store the file tree
        tree_root = self.write_dir_tree(self.build_dir_tree(active_files))

        # If no commit message is passed, store an indication of what was changed
        if commit_message == '':
            new_item = next((change for change in current_changes
                             if change['status'] in ['new', 'changed']), None)
            deleted_item = next((change for change in current_changes
                                 if change['status'] == 'deleted'), None)

            commit_message = "(Generated message)\n"
            if new_item is not None:
                commit_message += new_item['status'] + '    ' + new_item['path'] + '\n'
            if deleted_item is not None:
                commit_message += deleted_item['status'] + '    ' + deleted_item['path'] + '\n'
            if len(current_changes) > 2: commit_message += '...'

        # Commit timestamp
        commit_datetime = datetime.utcnow() if commit_datetime is None else commit_datetime
        commit_timestamp = commit_datetime.strftime("%d-%m-%Y %H:%M:%S:%f")

        # Create commit
        commit_object_hash = self.write_index_object(
            'commit', {
                'parent': self.get_head(),
                'utc_date_time': commit_timestamp,
                'commit_by': commit_by,
                'commit_message': commit_message,
                'tree_root': tree_root,
                'changes': current_changes
            })

        # Update head: write plus move (rename) for atomicity
        sfs.file_put_contents(sfs.cpjoin(self.base_path, 'new_head'),
                              bytes(commit_object_hash, encoding='utf8'))
        os.rename(sfs.cpjoin(self.base_path, 'new_head'),
                  sfs.cpjoin(self.base_path, 'head'))

        # Clean up the working state
        os.remove(sfs.cpjoin(self.base_path, 'active_commit_changes'))
        os.remove(sfs.cpjoin(self.base_path, 'active_commit_files'))
        sfs.ignore(os.remove, sfs.cpjoin(self.base_path, 'gc_log'))
        os.remove(sfs.cpjoin(self.base_path, 'active_commit'))

        return commit_object_hash
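
The head update above uses a write-plus-rename pattern: the new hash is written to 'new_head' and then renamed over 'head'. Because os.rename replaces the destination atomically on POSIX filesystems (for paths on the same filesystem), readers always see either the old or the new commit hash, never a partial write. A minimal standalone sketch of the same idea; the helper name atomic_write is illustrative and not part of shttpfs:

import os

def atomic_write(path: str, data: bytes) -> None:
    # Write to a sibling temporary file, then rename it over the target.
    # The rename is what makes the update atomic; both paths must be on the same filesystem.
    tmp_path = path + '.tmp'
    with open(tmp_path, 'wb') as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())  # ensure the data is on disk before it becomes visible
    os.rename(tmp_path, path)
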
Example #2
def make_client(name):
    make_dirs_if_dont_exist(DATA_DIR + name + '/.shttpfs')
    file_put_contents(
        DATA_DIR + name + '/.shttpfs/client_configuration.json',
        bytes(json.dumps({
            "server_domain": "none",
            "user": "******",
            "repository": repo_name,
            "private_key": private_key
        }), encoding='utf8'))
Example #3
    def new_backup(self, src: str):
        """ Create a new backup file allocation """

        backup_id_file = p.join(self.backup_dir, '.bk_idx')

        backup_num = int(file_or_default(backup_id_file, b'1'))
        backup_name = str(backup_num) + "_" + os.path.basename(src)
        backup_num += 1

        file_put_contents(backup_id_file,
                          bytes(str(backup_num), encoding='utf8'))
        return p.join(self.backup_dir, backup_name)
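
new_backup only allocates a destination path and bumps the '.bk_idx' counter file; the caller is expected to move data into place itself, as the 'backup' branch of do_action in Example #7 below does. A hedged usage sketch, with a purely illustrative instance name and paths:

# dest = journal.new_backup('/tmp/live/file.txt')   # e.g. '<backup_dir>/1_file.txt'
# shutil.move('/tmp/live/file.txt', dest)
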
Example #4
    def test_hash_file(self):
        """ Test that file hash returns the correct result. """

        make_data_dir()

        file_path = cpjoin(DATA_DIR, 'test')
        file_put_contents(file_path, b'some file contents')

        expected_result = 'cf57fcf9d6d7fb8fd7d8c30527c8f51026aa1d99ad77cc769dd0c757d4fe8667'
        result = hash_file(file_path)

        self.assertEqual(expected_result,
                         result,
                         msg='Hashes are not the same')

        delete_data_dir()
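
hash_file itself is not shown in these examples; given the 64-character hex digest the test expects, it presumably computes a SHA-256 hex digest of the file contents. A minimal sketch under that assumption, not the project's actual implementation:

import hashlib

def hash_file(file_path: str, chunk_size: int = 64 * 1024) -> str:
    # Stream the file in fixed-size chunks so large files never have to fit in memory
    h = hashlib.sha256()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()
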
Example #5
    def write_index_object(self, object_type: str, contents: Dict[str, Any]) -> str:
        new_object: indexObject = {'type': object_type}
        new_object.update(contents)  #type: ignore
        serialised = json.dumps(new_object)
        object_hash = hashlib.sha256(bytes(serialised, encoding='utf8')).hexdigest()
        target_base = sfs.cpjoin(self.base_path, 'index', object_hash[:2])
        if os.path.isfile(sfs.cpjoin(target_base, object_hash[2:])):
            return object_hash

        # Log items which do not already exist, for garbage collection
        self.gc_log_item(object_type, object_hash)

        #----
        sfs.make_dirs_if_dont_exist(target_base)
        sfs.file_put_contents(sfs.cpjoin(target_base, object_hash[2:]),
                              bytes(serialised, encoding='utf8'))
        return object_hash
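
write_index_object lays objects out content-addressed as index/<first two hex characters>/<remaining 62 characters> and skips the write when the object already exists. A hypothetical read counterpart for that layout, written against the standard library only and not taken from the project:

import json
import os

def read_index_object(base_path: str, object_hash: str) -> dict:
    # Rebuild the fan-out path used by write_index_object and parse the JSON payload
    target = os.path.join(base_path, 'index', object_hash[:2], object_hash[2:])
    with open(target, 'rb') as f:
        return json.load(f)
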
Example #6
    def begin(self) -> None:
        if self.have_active_commit(): raise Exception('There is already an active commit')

        active_files = {}
        head = self.get_head()

        if head != 'root':
            commit = self.read_commit_index_object(head)
            active_files = self.flatten_dir_tree(
                self.read_dir_tree(commit['tree_root']))

        # Active commit files stores all of the files which will be in this revision,
        # including ones carried over from the previous revision
        sfs.file_put_contents(
            sfs.cpjoin(self.base_path, 'active_commit_files'),
            bytes(json.dumps(active_files), encoding='utf8'))

        # Active commit changes stores a log of files which have been added, changed
        # or deleted in this revision
        sfs.file_put_contents(
            sfs.cpjoin(self.base_path, 'active_commit_changes'),
            bytes(json.dumps([]), encoding='utf8'))

        # Store that there is an active commit
        sfs.file_put_contents(sfs.cpjoin(self.base_path, 'active_commit'),
                              b'true')
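
begin and commit bracket the working state with an 'active_commit' marker file (written here, removed at the end of commit), so have_active_commit, which is not shown in these examples, most likely just checks whether that marker exists. A sketch under that assumption:

    def have_active_commit(self) -> bool:
        # True between begin() and the cleanup at the end of commit() (and presumably rollback())
        return os.path.isfile(sfs.cpjoin(self.base_path, 'active_commit'))
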
Example #7
    def do_action(self, command: dict, journal: bool = True):
        """ Implementation for declarative file operations. """

        # if self.journal is None: raise Exception('Must call begin first')

        cmd = 0
        src = 1
        path = 1
        data = 2
        dst = 2

        if journal is True:
            self.journal.write(json.dumps(command['undo']) + "\n")  # type: ignore
            self.journal.flush()  # type: ignore

        d = command['do']
        if d[cmd] == 'copy': shutil.copy(d[src], d[dst])
        elif d[cmd] == 'move': shutil.move(d[src], d[dst])
        elif d[cmd] == 'backup': shutil.move(d[src], self.new_backup(d[src]))
        elif d[cmd] == 'write':
            if callable(d[data]): d[data](d[path])
            else: file_put_contents(d[path], d[data])
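
do_action expects a dict with a 'do' list and a matching 'undo' list: element 0 names the operation ('copy', 'move', 'backup' or 'write') and the remaining elements are its arguments (source and destination for copy and move, source only for backup, path and data for write). A hypothetical command in that shape, with purely illustrative paths:

command = {
    'do':   ['move', '/tmp/staging/file.txt', '/tmp/live/file.txt'],
    'undo': ['move', '/tmp/live/file.txt', '/tmp/staging/file.txt'],
}
# With journal=True the undo entry is appended to the journal before the move runs:
# journal_instance.do_action(command)
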
Example #8
    def test_get_changes_since(self):
        file_put_contents(cpjoin(DATA_DIR, 'test 1'), b'test')
        file_put_contents(cpjoin(DATA_DIR, 'test 2'), b'test 1')
        file_put_contents(cpjoin(DATA_DIR, 'test 3'), b'test 2')

        #==================
        data_store = versioned_storage(DATA_DIR)
        data_store.begin()
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 1'),
                                    {'path': '/test/path'})
        id1 = data_store.commit('test msg', 'test user')

        changes = data_store.get_changes_since('root', data_store.get_head())

        self.assertEqual(
            changes, {
                '/test/path': {
                    'hash':
                    '9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08',
                    'path': '/test/path',
                    'status': 'new'
                }
            })

        #==================
        data_store.begin()
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 2'),
                                    {'path': '/another/path'})
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 3'),
                                    {'path': '/yet/another/path'})
        data_store.commit('test msg', 'test user')

        changes = data_store.get_changes_since(id1, data_store.get_head())

        self.assertEqual(
            changes, {
                '/another/path': {
                    'hash':
                    'f67213b122a5d442d2b93bda8cc45c564a70ec5d2a4e0e95bb585cf199869c98',
                    'path': '/another/path',
                    'status': 'new'
                },
                '/yet/another/path': {
                    'hash':
                    'dec2e4bc4992314a9c9a51bbd859e1b081b74178818c53c19d18d6f761f5d804',
                    'path': '/yet/another/path',
                    'status': 'new'
                }
            })
Example #9
    def test_rollback(self):
        file_put_contents(cpjoin(DATA_DIR, 'test 1'), b'test')
        file_put_contents(cpjoin(DATA_DIR, 'test 2'), b'test')
        file_put_contents(cpjoin(DATA_DIR, 'test 3'), b'test 2')

        #==================
        data_store = versioned_storage(DATA_DIR)

        data_store.begin()
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 1'),
                                    {'path': '/test/path'})
        data_store.commit('test msg', 'test user')

        data_store.begin()
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 2'),
                                    {'path': '/another/path'})
        data_store.fs_put_from_file(cpjoin(DATA_DIR, 'test 3'),
                                    {'path': '/yet/another/path'})
        data_store.rollback()

        self.assertEqual(os.listdir(cpjoin(DATA_DIR, 'files')), ['9f'])
        self.assertEqual(
            os.listdir(cpjoin(DATA_DIR, 'files', '9f')),
            ['86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08'])
Example #10
    def update_system_file(self, file_name: str, callback) -> None:
        contents = json.loads(
            sfs.file_get_contents(sfs.cpjoin(self.base_path, file_name)))
        contents = callback(contents)
        sfs.file_put_contents(sfs.cpjoin(self.base_path, file_name),
                              bytes(json.dumps(contents), encoding='utf8'))
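
update_system_file is a small read-modify-write helper for the JSON state files used in the other examples; appending a change record to 'active_commit_changes', for instance, could look like the commented sketch below (the change dict is illustrative only):

# self.update_system_file('active_commit_changes',
#                         lambda changes: changes + [{'status': 'new', 'path': '/some/file'}])
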
Example #11
    def test_system(self):
        test_content_1 = b'test file jhgrtelkj'
        # binary string with all byte values
        test_content_2 = b''.join([struct.pack('B', i) for i in range(256)])
        test_content_2_2 = test_content_2[::-1]
        test_content_3 = b'test content 3 sdavcxreiltlj'
        test_content_4 = b'test content 4 fsdwqtruytuyt'
        test_content_5 = b'test content 5 .,myuitouys'

        #=========
        setup()
        setup_client('client1')

        #==================================================
        # test_initial commit
        #==================================================
        file_put_contents(DATA_DIR + 'client1/test1', test_content_1)
        file_put_contents(DATA_DIR + 'client1/test2',
                          test_content_2)  # test with a binary blob
        #file_put_contents(DATA_DIR + u'client1/GȞƇØzǠ☸k😒♭',  test_content_2) # test unicode file name

        # commit the files
        session_token = client.authenticate()
        print(session_token)
        version_id = client.commit(session_token, 'test commit')

        self.assertNotEqual(version_id, None)

        # commit message should be in log
        req_result = client.get_versions(session_token)[0]
        self.assertEqual(
            'test commit',
            json.loads(req_result)['versions'][0]['commit_message'])

        # file should show up in list_changes
        req_result = client.get_files_in_version(session_token, version_id)[0]
        self.assertTrue('/test1' in json.loads(req_result)['files'])
        self.assertTrue('/test2' in json.loads(req_result)['files'])

        # file should exist in server fs
        self.assertEqual(
            test_content_1,
            file_get_contents(DATA_DIR + 'server/files/' +
                              get_server_file_name(test_content_1)))
        self.assertEqual(
            test_content_2,
            file_get_contents(DATA_DIR + 'server/files/' +
                              get_server_file_name(test_content_2)))

        # NOTE As change detection is done using access timestamps, need a
        # delay between tests to make sure changes are detected correctly
        time.sleep(0.5)

        #==================================================
        # test update
        #==================================================
        setup_client('client2')
        session_token = client.authenticate()
        client.update(session_token)
        self.assertEqual(test_content_1,
                         file_get_contents(DATA_DIR + 'client2/test1'))
        self.assertEqual(test_content_2,
                         file_get_contents(DATA_DIR + 'client2/test2'))

        time.sleep(0.5)  # See above

        #==================================================
        # test delete and add
        #==================================================
        os.unlink(DATA_DIR + 'client2/test1')
        file_put_contents(DATA_DIR + 'client2/test2',
                          test_content_2_2)  # test changing an existing file
        file_put_contents(DATA_DIR + 'client2/test3', test_content_3)
        file_put_contents(DATA_DIR + 'client2/test4', test_content_4)

        setup_client('client2')
        session_token = client.authenticate()
        version_id = client.commit(session_token,
                                   'create and delete some files')

        # check change is reflected correctly in the commit log
        req_result = client.get_changes_in_version(session_token,
                                                   version_id)[0]
        res_index = {v['path']: v for v in json.loads(req_result)['changes']}
        self.assertEqual('deleted', res_index['/test1']['status'])
        self.assertEqual('new', res_index['/test2']['status'])
        self.assertEqual('new', res_index['/test3']['status'])
        self.assertEqual('new', res_index['/test4']['status'])

        # update first repo, file should be deleted and new file added
        setup_client('client1')
        session_token = client.authenticate()
        client.update(session_token)

        # Verify changes are reflected in FS
        self.assertFalse(os.path.isfile(DATA_DIR + 'client1/test1'))
        self.assertEqual(test_content_2_2,
                         file_get_contents(DATA_DIR + 'client1/test2'))
        self.assertEqual(test_content_3,
                         file_get_contents(DATA_DIR + 'client1/test3'))
        self.assertEqual(test_content_4,
                         file_get_contents(DATA_DIR + 'client1/test4'))

        time.sleep(0.5)  # See above

        #==================================================
        # setup for next test
        #==================================================
        file_put_contents(DATA_DIR + 'client1/test1', test_content_1)
        file_put_contents(DATA_DIR + 'client1/test5', test_content_1)
        file_put_contents(DATA_DIR + 'client1/test6', test_content_1)

        setup_client('client1')
        client.commit(client.authenticate(), 'test setup')

        setup_client('client2')
        client.update(client.authenticate())

        time.sleep(0.5)  # See above

        #==================================================
        # test conflict resolution, both to the server
        # and client version
        #==================================================
        # Delete on client, change on server resolution
        file_put_contents(DATA_DIR + 'client1/test1', test_content_5 + b'11')
        os.unlink(DATA_DIR + 'client2/test1')

        file_put_contents(DATA_DIR + 'client1/test2', test_content_5 + b'00')
        os.unlink(DATA_DIR + 'client2/test2')

        # Delete on server, change on client resolution
        os.unlink(DATA_DIR + 'client1/test5')
        file_put_contents(DATA_DIR + 'client2/test5', test_content_5 + b'ff')

        os.unlink(DATA_DIR + 'client1/test6')
        file_put_contents(DATA_DIR + 'client2/test6', test_content_5 + b'gg')

        # Double change resolution
        file_put_contents(DATA_DIR + 'client1/test3', test_content_5 + b'aa')
        file_put_contents(DATA_DIR + 'client2/test3', test_content_5 + b'bb')

        file_put_contents(DATA_DIR + 'client1/test4', test_content_5 + b'cc')
        file_put_contents(DATA_DIR + 'client2/test4', test_content_5 + b'dd')

        # commit on both clients; the second one to commit should error
        setup_client('client1')
        session_token = client.authenticate()
        version_id = client.commit(session_token,
                                   'initial commit for conflict test')

        setup_client('client2')
        session_token = client.authenticate()
        try:
            version_id = client.commit(session_token, 'this should conflict')
            self.fail()
        except SystemExit:
            pass

        # Update should begin conflict resolution process
        try:
            client.update(session_token, testing=True)
            self.fail()
        except SystemExit:
            pass

        # test server versions of conflict files downloaded correctly
        self.assertEqual(file_get_contents(DATA_DIR + 'client1/test1'),
                         test_content_5 + b'11')
        self.assertEqual(file_get_contents(DATA_DIR + 'client1/test2'),
                         test_content_5 + b'00')
        self.assertEqual(file_get_contents(DATA_DIR + 'client1/test3'),
                         test_content_5 + b'aa')
        self.assertEqual(file_get_contents(DATA_DIR + 'client1/test4'),
                         test_content_5 + b'cc')
        # NOTE nothing to download in delete on server case

        # Test resolving the conflicts
        path = DATA_DIR + 'client2/.shttpfs/conflict_resolution.json'
        resolve = json.loads(file_get_contents(path))
        resolve_index = {v['1_path']: v for v in resolve}

        resolve_index['/test1']['4_resolution'] = ['client']
        resolve_index['/test2']['4_resolution'] = ['server']
        resolve_index['/test3']['4_resolution'] = ['client']
        resolve_index['/test4']['4_resolution'] = ['server']
        resolve_index['/test5']['4_resolution'] = ['client']
        resolve_index['/test6']['4_resolution'] = ['server']

        file_put_contents(
            path,
            json.dumps(list(resolve_index.values())).encode('utf8'))

        # perform update and test resolve as expected
        client.update(session_token)
        self.assertFalse(os.path.isfile(DATA_DIR + 'client2/test1'))
        self.assertEqual(test_content_5 + b'00',
                         file_get_contents(DATA_DIR + 'client2/test2'))
        self.assertEqual(test_content_5 + b'bb',
                         file_get_contents(DATA_DIR + 'client2/test3'))
        self.assertEqual(test_content_5 + b'cc',
                         file_get_contents(DATA_DIR + 'client2/test4'))
        self.assertEqual(test_content_5 + b'ff',
                         file_get_contents(DATA_DIR + 'client2/test5'))
        self.assertFalse(os.path.isfile(DATA_DIR + 'client2/test6'))

        # This should now commit
        version_id = client.commit(session_token, 'this should be ok')
        self.assertNotEqual(None, version_id)

        req_result = client.get_changes_in_version(session_token,
                                                   version_id)[0]
        res_index = {v['path']: v for v in json.loads(req_result)['changes']}

        self.assertEqual('deleted', res_index['/test1']['status'])
        self.assertTrue('/test2' not in res_index)
        self.assertEqual('new', res_index['/test3']['status'])
        self.assertTrue('/test4' not in res_index)
        self.assertEqual('new', res_index['/test5']['status'])
        self.assertTrue('/test6' not in res_index)

        #==================================================
        delete_data_dir()
Example #12
def run():
    global server_connection, config

    args = list(sys.argv)[1:]

    #----------------------------
    if len(args) == 0 or args[0] == '-h':
        print("""
    update,   update the working copy to the current state on the server
    commit,   commit any changes to the working copy to the server
    sync,     periodically sync the working copy with the server automatically

    Setup:
    keygen,   generate a new public and private keypair
    checkout, check out a working copy from a server
        """)

    #----------------------------
    elif args[0] == 'keygen':
        private_key, public_key = crypto.make_keypair()
        print('\nPrivate key:\n' + private_key.decode('utf8'))
        print('\nPublic key: \n' + public_key.decode('utf8') + '\n')

    #----------------------------
    elif args[0] == 'checkout':
        plain_input = get_if_set_or_quit(args, 1, 'URL is missing')

        result = urllib.parse.urlparse(plain_input)
        repository_name = [f for f in result.path.split('/') if f != ""]
        server_domain = result.scheme + '://' + result.netloc
        if (result.scheme not in ['http', 'https'] or result.netloc == ''
                or len(repository_name) != 1):
            raise SystemExit(
                "Invalid URL, usage: http(s)://domain[:port]/[repository name]. Repository names cannot contain '/'."
            )

        # get user
        print(
            'Please enter the user name for this repository, then press enter.'
        )
        user = input('> ').strip(' \t\n\r')
        if user == '': raise SystemExit('User name is blank, exiting.')

        # get private key
        print(
            'Please enter the private key for this repository, then press enter.'
        )
        private_key = input('> ').strip(' \t\n\r')
        if private_key == '': raise SystemExit('Key is blank, exiting.')

        #---------------
        config = {
            "server_domain": server_domain,
            "repository": repository_name[0],
            "user": "******",
            "private_key": private_key
        }

        # Validate info is correct by attempting to authenticate
        server_connection = client_http_request(config['server_domain'])
        unlocked_key = config["private_key"] = crypto.unlock_private_key(
            config["private_key"])
        session_token: str = authenticate()
        config["private_key"] = private_key

        # create repo dir
        try:
            os.makedirs(repository_name[0])
        except OSError:
            raise SystemExit('Directory already exists')
        os.makedirs(cpjoin(repository_name[0], '.shttpfs'))
        file_put_contents(
            cpjoin(repository_name[0], '.shttpfs', 'client_configuration.json'),
            json.dumps(config, indent=4))

        config["private_key"] = unlocked_key
        os.chdir(repository_name[0])
        init(True)
        update(session_token)

    #----------------------------
    elif args[0] == 'update':
        init()
        update(authenticate())

    #----------------------------
    elif args[0] == 'commit':
        commit_message = ''
        if get_if_set_or_default(args, 1, '') == '-m':
            commit_message = get_if_set_or_quit(
                args, 2, 'Please specify a commit message after -m')
        init()
        commit(authenticate(), commit_message)

    #----------------------------
    elif args[0] == 'sync':
        init()
        session_token: str = authenticate()

        commit_message = ''
        if get_if_set_or_default(args, 1, '') == '-m':
            commit_message = get_if_set_or_quit(
                args, 2, 'Please specify a commit message after -m')

        update(session_token)
        commit(session_token, commit_message)

    #----------------------------
    elif args[0] == 'autosync':
        init()
        session_token: str = None
        while True:
            session_token = authenticate(session_token)
            update(session_token)
            commit(session_token)
            time.sleep(60)

    #----------------------------
    elif args[0] == 'list_versions':
        init()
        session_token: str = authenticate()
        req_result, headers = get_versions(session_token)

        if headers['status'] == 'ok':
            for vers in reversed(json.loads(req_result)['versions']):
                print('Commit:  ' + vers['id'])
                print('Date:    ' + vers['utc_date_time'] + ' (UTC) ')
                print('By user: ' + vers['commit_by'])
                print('\n' + vers['commit_message'])
                print()

    #----------------------------
    elif args[0] == 'list_changes':
        init()
        session_token: str = authenticate()
        version_id = get_if_set_or_quit(args, 1, 'Please specify a version id')
        req_result, headers = get_changes_in_version(session_token, version_id)

        if headers['status'] == 'ok':
            for change in json.loads(req_result)['changes']:
                print(change['status'] + '     ' + change['path'])

    #----------------------------
    elif args[0] == 'list_files':
        init()
        session_token: str = authenticate()
        version_id = get_if_set_or_quit(args, 1, 'Please specify a version id')
        req_result, headers = get_files_in_version(session_token, version_id)

        if headers['status'] == 'ok':
            for fle in json.loads(req_result)['files']:
                print(fle)
Example #13
def update(session_token: str, testing=False):
    """ Compare changes on the client to changes on the server and update local files
    which have changed on the server. """

    conflict_comparison_file_dest = cpjoin(config['data_dir'], '.shttpfs',
                                           'conflict_files')
    conflict_resolution_path = cpjoin(config['data_dir'], '.shttpfs',
                                      'conflict_resolution.json')
    conflict_resolutions = json.loads(
        file_or_default(conflict_resolution_path, '[]'))

    # Only send the conflict resolutions if every conflict has been resolved to a single choice
    if not all(len(c['4_resolution']) == 1 for c in conflict_resolutions):
        conflict_resolutions = []

    # Send the changes and the revision of the most recent update to the server to find changes
    manifest, client_changes = find_local_changes()

    req_result, headers = server_connection.request(
        "find_changed", {
            "session_token": session_token,
            'repository': config['repository'],
            "previous_revision": manifest['have_revision'],
        }, {
            "client_changes": json.dumps(client_changes),
            "conflict_resolutions": json.dumps(conflict_resolutions)
        })

    if headers['status'] != 'ok':
        if headers['msg'] == 'Please resolve conflicts':
            raise SystemExit(
                'Server error: Please resolve conflicts in .shttpfs/conflict_resolution.json'
            )
        else:
            raise SystemExit('Server error')

    result = json.loads(req_result)
    changes = result['sorted_changes']

    # Are there any changes?
    if all(v == [] for k, v in changes.items()):
        print('Nothing to update')
        return

    # Pull and delete from remote to local
    if changes['client_pull_files'] != []:
        # Filter out pull ignore files
        filtered_pull_files = []
        for fle in changes['client_pull_files']:
            if not next((True for flter in config['pull_ignore_filters']
                         if fnmatch.fnmatch(fle['path'], flter)), False):
                filtered_pull_files.append(fle)
            else:  # log ignored items to give the opportunity to pull them in the future
                with open(
                        cpjoin(working_copy_base_path, '.shttpfs',
                               'pull_ignored_items'), 'a') as pull_ignore_log:
                    pull_ignore_log.write(json.dumps((result['head'], fle)))
                    pull_ignore_log.flush()

        if filtered_pull_files != []:
            print('Pulling files from server...')

        #----------
        for fle in filtered_pull_files:
            print('Pulling file: ' + fle['path'])

            req_result, headers = server_connection.request(
                "pull_file", {
                    'session_token': session_token,
                    'repository': config['repository'],
                    'path': fle['path']
                },
                gen=True)

            if headers['status'] != 'ok':
                raise SystemExit('Failed to pull file')
            else:
                make_dirs_if_dont_exist(
                    data_store.get_full_file_path(
                        cpjoin(*fle['path'].split('/')[:-1]) + '/'))
                data_store.fs_put(fle['path'], req_result)

    # Files which have been deleted on server and need deleting on client
    if changes['to_delete_on_client'] != []:
        print('Removing files deleted on the server...')

        for fle in changes['to_delete_on_client']:
            print('Deleting file: ' + fle['path'])

            try:
                data_store.fs_delete(fle['path'])
            except OSError:
                print('Warning: remote deleted file does not exist locally.')

            # Delete the folder if it is now empty
            try:
                os.removedirs(
                    os.path.dirname(data_store.get_full_file_path(
                        fle['path'])))
            except OSError as e:
                if e.errno not in [errno.ENOTEMPTY, errno.ENOENT]: raise

    # Files which are in conflict
    if changes['conflict_files'] != []:
        print("There are conflicts!\n")

        out = []
        server_versions = []
        for fle in changes['conflict_files']:
            fle['resolution'] = ['local', 'remote']
            print('Path:          ' + fle['file_info']['path'])
            print('Client status: ' + fle['client_status'])
            print('Server status: ' + fle['server_status'])
            print()
            out.append({
                '1_path': fle['file_info']['path'],
                '2_client_status': fle['client_status'],
                '3_server_status': fle['server_status'],
                '4_resolution': ['client', 'server']
            })
            if fle['server_status'] == 'Changed':
                server_versions.append(fle['file_info'])

        #===============
        if server_versions != []:
            choice = None
            if not testing:
                while True:
                    print('Download server versions for comparison? (Y/N)')
                    choice = input()
                    if choice.lower() in ['y', 'n']: break
            else: choice = 'y'

            errors = []
            if choice == 'y':
                for fle in server_versions:
                    print('Pulling file: ' + fle['path'])

                    result, headers = server_connection.request(
                        "pull_file", {
                            'session_token': session_token,
                            'repository': config['repository'],
                            'path': fle['path']
                        },
                        gen=True)

                    if headers['status'] != 'ok':
                        errors.append(fle['path'])

                    else:
                        make_dirs_if_dont_exist(
                            cpjoin(conflict_comparison_file_dest,
                                   *fle['path'].split('/')[:-1]) + '/')
                        result(
                            cpjoin(conflict_comparison_file_dest, fle['path']))

                print(
                    'Server versions of conflicting files written to .shttpfs/conflict_files\n'
                )

            pprint(errors)

        # ====================

        file_put_contents(
            conflict_resolution_path,
            json.dumps(out, indent=4, sort_keys=True).encode('utf8'))
        raise SystemExit(
            "Conflict resolution file written to .shttpfs/conflict_resolution.json\n"
            +
            "Please edit this file removing 'client', or 'server' to choose which version to retain."
        )

    # Update the latest revision in the manifest only if there are no conflicts
    else:
        data_store.begin()
        manifest = data_store.read_local_manifest()
        manifest['have_revision'] = result['head']
        data_store.write_local_manifest(manifest)
        data_store.commit()

        # Delete the conflict resolution file and recursively delete any conflict files downloaded for comparison
        ignore(os.remove, conflict_resolution_path)
        ignore(shutil.rmtree, conflict_comparison_file_dest)

        if changes['to_delete_on_server'] != [] or changes['client_push_files'] != []:
            print('There are local changes to commit')
        else:
            print('Update OK')