def commit(self, commit_message, commit_by, commit_datetime=None) -> str:
    """Finalise the active commit and advance the repository head.

    Reads the staged change list and file list from the working state,
    writes the directory tree and a commit index object, then atomically
    updates 'head' and removes the working-state files.

    Args:
        commit_message: Commit message; if empty, a short summary of the
            changes is generated instead.
        commit_by:      Identifier of the committer, stored in the commit.
        commit_datetime: Optional datetime to stamp the commit with;
            defaults to the current UTC time.

    Returns:
        The hash of the newly created commit index object.

    Raises:
        Exception: if there is no active commit, or the change list is empty.
    """
    # Guard: a commit must have been started (staging files exist) before
    # it can be finalised.
    if not self.have_active_commit(): raise Exception()

    # Staged state written earlier in the commit cycle.
    current_changes = json.loads(sfs.file_get_contents(sfs.cpjoin(self.base_path, 'active_commit_changes')))
    active_files = json.loads(sfs.file_get_contents(sfs.cpjoin(self.base_path, 'active_commit_files')))

    if current_changes == []: raise Exception('Empty commit')

    # Create and store the file tree
    tree_root = self.write_dir_tree(self.build_dir_tree(active_files))

    # If no commit message is passed store an indication of what was changed:
    # one example new/changed item, one example deleted item, and an
    # ellipsis if there were more than two changes in total.
    if commit_message == '':
        new_item = next((change for change in current_changes if change['status'] in ['new', 'changed']), None)
        deleted_item = next((change for change in current_changes if change['status'] == 'deleted'), None)

        commit_message = "(Generated message)\n"
        if new_item is not None: commit_message += new_item['status'] + ' ' + new_item['path'] + '\n'
        if deleted_item is not None: commit_message += deleted_item['status'] + ' ' + deleted_item['path'] + '\n'
        if len(current_changes) > 2: commit_message += '...'

    # Commit timestamp
    # NOTE(review): datetime.utcnow() is deprecated in recent Python and
    # returns a naive datetime — consider datetime.now(timezone.utc);
    # confirm the strftime output stays compatible with existing commits.
    commit_datetime = datetime.utcnow() if commit_datetime is None else commit_datetime
    commit_timestamp = commit_datetime.strftime("%d-%m-%Y %H:%M:%S:%f")

    # Create commit
    commit_object_hash = self.write_index_object('commit', {'parent': self.get_head(),
                                                            'utc_date_time': commit_timestamp,
                                                            'commit_by': commit_by,
                                                            'commit_message': commit_message,
                                                            'tree_root': tree_root,
                                                            'changes': current_changes})

    #update head, write plus move for atomicity
    sfs.file_put_contents(sfs.cpjoin(self.base_path, 'new_head'), bytes(commit_object_hash, encoding='utf8'))
    os.rename(sfs.cpjoin(self.base_path, 'new_head'), sfs.cpjoin(self.base_path, 'head'))

    #and clean up working state
    os.remove(sfs.cpjoin(self.base_path, 'active_commit_changes'))
    os.remove(sfs.cpjoin(self.base_path, 'active_commit_files'))
    # gc_log may not exist; sfs.ignore suppresses the error in that case.
    sfs.ignore(os.remove, sfs.cpjoin(self.base_path, 'gc_log'))
    # Removing 'active_commit' last marks the commit cycle as finished.
    os.remove(sfs.cpjoin(self.base_path, 'active_commit'))

    return commit_object_hash
def read_index_object(self, object_hash: str, expected_object_type: str) -> indexObject:
    """Load the index object stored under *object_hash* and check its type.

    Index objects are sharded on disk under 'index/<first two hash chars>/
    <remaining hash chars>' and stored as JSON.

    Raises:
        IOError: if the stored object's 'type' field does not equal
            *expected_object_type*.
    """
    object_path = sfs.cpjoin(self.base_path, 'index', object_hash[:2], object_hash[2:])
    loaded: indexObject = json.loads(sfs.file_get_contents(object_path))

    if loaded['type'] != expected_object_type:
        raise IOError('Type of object does not match expected type')
    return loaded
def read_user_lock(repository_path: str):
    """Return the parsed user lock for *repository_path*, or None.

    None is returned when the lock file cannot be read (IOError), is
    empty, or does not contain valid JSON (ValueError).
    """
    try:
        user_lock = file_get_contents(cpjoin(repository_path, 'user_file'))
        if user_lock == '':
            return None
        return json.loads(user_lock)
    except (IOError, ValueError):
        # Missing/unreadable file or malformed JSON both mean "no lock".
        return None
def init(unlocked=False):
    """Initialise the client: load configuration, lock the working copy,
    read ignore filters, and set up storage and the server connection.

    Args:
        unlocked: when True, skip unlocking the private key (e.g. for
            operations that do not need to sign anything).

    Raises:
        SystemExit: if the configuration is missing or malformed, or the
            working copy is already locked by another client.
    """
    # 'lockfile' must be module-global: if it were a local, CPython would
    # garbage-collect the file object when init() returns, closing the fd
    # and silently releasing the flock — defeating the single-client lock.
    global data_store, server_connection, config, lockfile

    try:
        config = json.loads(file_get_contents(cpjoin(working_copy_base_path, '.shttpfs', 'client_configuration.json')))
    except IOError:
        raise SystemExit('No shttpfs configuration found')
    except ValueError:
        raise SystemExit('Configuration file syntax error')

    # Lock for sanity check, only one client can use the working copy at any time
    try:
        lockfile = open(cpjoin(working_copy_base_path, '.shttpfs', 'lock_file'), 'w')
        fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        raise SystemExit('Could not lock working copy')

    #-----------
    ignore_filters: str = file_or_default(cpjoin(working_copy_base_path, '.shttpfs_ignore'), b'').decode('utf8')
    pull_ignore_filters: str = file_or_default(cpjoin(working_copy_base_path, '.shttpfs_pull_ignore'), b'').decode('utf8')

    #-----------
    # Plain assignments: annotations on subscript targets (config['x']: T = ...)
    # are ignored at runtime and flagged by linters.
    config['ignore_filters'] = ['/.shttpfs*'] + ignore_filters.splitlines()
    config['pull_ignore_filters'] = pull_ignore_filters.splitlines()
    config['data_dir'] = working_copy_base_path

    if not unlocked:
        config["private_key"] = crypto.unlock_private_key(config["private_key"])

    data_store = plain_storage(config['data_dir'])
    server_connection = client_http_request(config['server_domain'])
def update_system_file(self, file_name: str, callback) -> None:
    """Read a JSON system file, transform it with *callback*, write it back.

    Args:
        file_name: Name of the file under the repository base path.
        callback:  Function receiving the parsed contents and returning
            the replacement contents.
    """
    file_path = sfs.cpjoin(self.base_path, file_name)
    updated = callback(json.loads(sfs.file_get_contents(file_path)))
    sfs.file_put_contents(file_path, bytes(json.dumps(updated), encoding='utf8'))
def test_system(self):
    """End-to-end system test: commit, update, delete/add, and two-way
    conflict resolution between two client working copies and the server.

    NOTE(review): steps are strictly order-dependent and separated by
    sleeps because change detection uses access timestamps — do not
    reorder or parallelise.
    """
    test_content_1 = b'test file jhgrtelkj'
    test_content_2 = b''.join([struct.pack('B', i) for i in range(256)])  # binary string with all byte values
    test_content_2_2 = test_content_2[::-1]
    test_content_3 = b'test content 3 sdavcxreiltlj'
    test_content_4 = b'test content 4 fsdwqtruytuyt'
    test_content_5 = b'test content 5 .,myuitouys'

    #=========
    setup()
    setup_client('client1')

    #==================================================
    # test_initial commit
    #==================================================
    file_put_contents(DATA_DIR + 'client1/test1', test_content_1)
    file_put_contents(DATA_DIR + 'client1/test2', test_content_2)  # test with a binary blob
    #file_put_contents(DATA_DIR + u'client1/GȞƇØzÇ â˜¸k😒â™', test_content_2) # test unicode file name

    # commit the files
    session_token = client.authenticate()
    print(session_token)
    version_id = client.commit(session_token, 'test commit')
    self.assertNotEqual(version_id, None)

    # commit message should be in log
    req_result = client.get_versions(session_token)[0]
    self.assertEqual('test commit', json.loads(req_result)['versions'][0]['commit_message'])

    # file should show up in list_changes
    req_result = client.get_files_in_version(session_token, version_id)[0]
    self.assertTrue('/test1' in json.loads(req_result)['files'])
    self.assertTrue('/test2' in json.loads(req_result)['files'])

    # file should exist in server fs
    self.assertEqual(test_content_1, file_get_contents(DATA_DIR + 'server/files/' + get_server_file_name(test_content_1)))
    self.assertEqual(test_content_2, file_get_contents(DATA_DIR + 'server/files/' + get_server_file_name(test_content_2)))

    # NOTE As change detection is done using access timestamps, need a
    # delay between tests to make sure changes are detected correctly
    time.sleep(0.5)

    #==================================================
    # test update
    #==================================================
    setup_client('client2')
    session_token = client.authenticate()
    client.update(session_token)

    # client2 should now have the files committed from client1
    self.assertEqual(test_content_1, file_get_contents(DATA_DIR + 'client2/test1'))
    self.assertEqual(test_content_2, file_get_contents(DATA_DIR + 'client2/test2'))

    time.sleep(0.5)  # See above

    #==================================================
    # test delete and add
    #==================================================
    os.unlink(DATA_DIR + 'client2/test1')
    file_put_contents(DATA_DIR + 'client2/test2', test_content_2_2)  # test changing an existing file
    file_put_contents(DATA_DIR + 'client2/test3', test_content_3)
    file_put_contents(DATA_DIR + 'client2/test4', test_content_4)

    setup_client('client2')
    session_token = client.authenticate()
    version_id = client.commit(session_token, 'create and delete some files')

    # check change is reflected correctly in the commit log
    req_result = client.get_changes_in_version(session_token, version_id)[0]
    res_index = {v['path']: v for v in json.loads(req_result)['changes']}
    self.assertEqual('deleted', res_index['/test1']['status'])
    self.assertEqual('new', res_index['/test2']['status'])
    self.assertEqual('new', res_index['/test3']['status'])
    self.assertEqual('new', res_index['/test4']['status'])

    # update first repo, file should be deleted and new file added
    setup_client('client1')
    session_token = client.authenticate()
    client.update(session_token)

    # Verify changes are reflected in FS
    self.assertFalse(os.path.isfile(DATA_DIR + 'client1/test1'))
    self.assertEqual(test_content_2_2, file_get_contents(DATA_DIR + 'client1/test2'))
    self.assertEqual(test_content_3, file_get_contents(DATA_DIR + 'client1/test3'))
    self.assertEqual(test_content_4, file_get_contents(DATA_DIR + 'client1/test4'))

    time.sleep(0.5)  # See above

    #==================================================
    # setup for next test
    #==================================================
    file_put_contents(DATA_DIR + 'client1/test1', test_content_1)
    file_put_contents(DATA_DIR + 'client1/test5', test_content_1)
    file_put_contents(DATA_DIR + 'client1/test6', test_content_1)

    setup_client('client1')
    client.commit(client.authenticate(), 'test setup')

    setup_client('client2')
    client.update(client.authenticate())

    time.sleep(0.5)  # See above

    #==================================================
    # test conflict resolution, both to the server
    # and client version
    #==================================================
    # Delete on client, change on server resolution
    file_put_contents(DATA_DIR + 'client1/test1', test_content_5 + b'11')
    os.unlink(DATA_DIR + 'client2/test1')
    file_put_contents(DATA_DIR + 'client1/test2', test_content_5 + b'00')
    os.unlink(DATA_DIR + 'client2/test2')

    # Delete on server, change on client resolution
    os.unlink(DATA_DIR + 'client1/test5')
    file_put_contents(DATA_DIR + 'client2/test5', test_content_5 + b'ff')
    os.unlink(DATA_DIR + 'client1/test6')
    file_put_contents(DATA_DIR + 'client2/test6', test_content_5 + b'gg')

    # Double change resolution
    file_put_contents(DATA_DIR + 'client1/test3', test_content_5 + b'aa')
    file_put_contents(DATA_DIR + 'client2/test3', test_content_5 + b'bb')
    file_put_contents(DATA_DIR + 'client1/test4', test_content_5 + b'cc')
    file_put_contents(DATA_DIR + 'client2/test4', test_content_5 + b'dd')

    # commit both clients second to commit should error
    setup_client('client1')
    session_token = client.authenticate()
    version_id = client.commit(session_token, 'initial commit for conflict test')

    setup_client('client2')
    session_token = client.authenticate()
    try:
        version_id = client.commit(session_token, 'this should conflict')
        self.fail()
    except SystemExit:
        pass

    # Update should begin conflict resolution process
    try:
        client.update(session_token, testing=True)
        self.fail()
    except SystemExit:
        pass

    # test server versions of conflict files downloaded correctly
    self.assertEqual(file_get_contents(DATA_DIR + 'client1/test1'), test_content_5 + b'11')
    self.assertEqual(file_get_contents(DATA_DIR + 'client1/test2'), test_content_5 + b'00')
    self.assertEqual(file_get_contents(DATA_DIR + 'client1/test3'), test_content_5 + b'aa')
    self.assertEqual(file_get_contents(DATA_DIR + 'client1/test4'), test_content_5 + b'cc')
    # NOTE nothing to download in delete on server case

    #test resolving it
    path = DATA_DIR + 'client2/.shttpfs/conflict_resolution.json'
    resolve = json.loads(file_get_contents(path))
    resolve_index = {v['1_path']: v for v in resolve}
    # Pick a winner ('client' or 'server') for each conflicted path.
    resolve_index['/test1']['4_resolution'] = ['client']
    resolve_index['/test2']['4_resolution'] = ['server']
    resolve_index['/test3']['4_resolution'] = ['client']
    resolve_index['/test4']['4_resolution'] = ['server']
    resolve_index['/test5']['4_resolution'] = ['client']
    resolve_index['/test6']['4_resolution'] = ['server']
    file_put_contents(path, json.dumps([v for v in list(resolve_index.values())]).encode('utf8'))

    # perform update and test resolve as expected
    client.update(session_token)
    self.assertFalse(os.path.isfile(DATA_DIR + 'client2/test1'))
    self.assertEqual(test_content_5 + b'00', file_get_contents(DATA_DIR + 'client2/test2'))
    self.assertEqual(test_content_5 + b'bb', file_get_contents(DATA_DIR + 'client2/test3'))
    self.assertEqual(test_content_5 + b'cc', file_get_contents(DATA_DIR + 'client2/test4'))
    self.assertEqual(test_content_5 + b'ff', file_get_contents(DATA_DIR + 'client2/test5'))
    self.assertFalse(os.path.isfile(DATA_DIR + 'client2/test6'))

    # This should now commit
    version_id = client.commit(session_token, 'this should be ok')
    self.assertNotEqual(None, version_id)

    req_result = client.get_changes_in_version(session_token, version_id)[0]
    res_index = {v['path']: v for v in json.loads(req_result)['changes']}
    self.assertEqual('deleted', res_index['/test1']['status'])
    self.assertTrue('/test2' not in res_index)
    self.assertEqual('new', res_index['/test3']['status'])
    self.assertTrue('/test4' not in res_index)
    self.assertEqual('new', res_index['/test5']['status'])
    self.assertTrue('/test6' not in res_index)

    #==================================================
    delete_data_dir()
def send_file(self, url, headers, file_path):
    """Upload the file at *file_path* to *url* and return (body, headers).

    The file is read fully into memory and streamed via a BytesIO reader;
    response headers are returned as a plain dict.
    """
    payload = BytesIO(file_get_contents(file_path))
    response = self.request_helper(url, headers, payload)
    return response.body, dict(response.headers)