def test_create_update_and_delete_user():
    """Exercise the full user lifecycle: create, update, fetch, delete."""
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token = response.json()['token']

    payload = {
        'username': generate_string(),
        'password': '******',
        'email': '{}@yoda.com'.format(generate_string()),
    }

    # Create the user
    response = requests.post(uri('auth', '/users'),
                             json=payload, headers=token_header(token))
    assert response.status_code == 201
    user_id = response.json()['id']
    user_uri = uri('auth', '/users/{}'.format(user_id))

    # Update the user and verify the change is visible on a fresh GET
    payload['first_name'] = 'John'
    response = requests.put(user_uri, json=payload, headers=token_header(token))
    assert response.status_code == 200
    response = requests.get(user_uri, headers=token_header(token))
    assert response.status_code == 200
    assert response.json()['first_name'] == 'John'

    # Delete the user and verify it no longer exists
    response = requests.delete(user_uri, headers=token_header(token))
    assert response.status_code == 204
    response = requests.get(user_uri, headers=token_header(token))
    assert response.status_code == 404
def upload_model_archive(file_path, repository_id, token):
    """Upload a model archive as a 'binary' file with no scan type.

    :param file_path: Local path of the archive to upload.
    :param repository_id: Target storage repository ID.
    :param token: Auth token used for all storage-service requests.
    :return: The storage ID of the uploaded file, or None if the upload failed.
    """
    # Look up the 'binary' file type and the 'none' scan type, which the
    # chunked upload protocol requires as headers.
    response = requests.get(uri('storage', '/file-types?name=binary'),
                            headers=token_header(token))
    file_type_id = response.json()[0]['id']
    response = requests.get(uri('storage', '/scan-types?name=none'),
                            headers=token_header(token))
    scan_type_id = response.json()[0]['id']
    try:
        _, storage_id = upload_file(
            file_path, file_type_id, scan_type_id, repository_id, token)
        return storage_id
    except RuntimeError as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # original `e.message` raised AttributeError inside this handler.
        # Formatting the exception itself is equivalent and portable.
        print('Failed to upload model ({})'.format(e))
        return None
def test_upload_and_download():
    """Upload a data file in chunks, download it back, and compare sizes.

    Skipped (returns early) when the DATA_DIR environment variable is unset.
    """
    if os.getenv('DATA_DIR', None) is None:
        return
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token = response.json()['token']
    # BUG FIX: the query parameter was 'names=txt'; every other type lookup
    # in this suite uses 'name=<value>' ('name=binary', 'name=csv',
    # 'name=none'), so the plural form here was a typo.
    response = requests.get(uri('storage', '/file-types?name=txt'),
                            headers=token_header(token))
    assert response.status_code == 200
    file_type_id = response.json()[0]['id']
    response = requests.get(uri('storage', '/scan-types?name=none'),
                            headers=token_header(token))
    assert response.status_code == 200
    scan_type_id = response.json()[0]['id']
    # Create a uniquely-named repository to upload into
    name = 'repository-{}'.format(generate_string(8))
    response = requests.post(uri('storage', '/repositories'),
                             json={'name': name}, headers=token_header(token))
    assert response.status_code == 201
    repository_id = response.json()['id']
    file_name = os.path.join(os.getenv('DATA_DIR'), 'data.nii.gz')
    file_id, _ = upload_file(
        file_name, file_type_id, scan_type_id, repository_id, token)
    # Resolve the file's storage ID, then download the content
    response = requests.get(
        uri('storage',
            '/repositories/{}/files/{}'.format(repository_id, file_id)),
        headers=token_header(token))
    assert response.status_code == 200
    storage_id = response.json()['storage_id']
    response = requests.get(uri('storage', '/downloads/{}'.format(storage_id)),
                            headers=token_header(token))
    assert response.status_code == 200
    assert response.content
    with open('tmp.nii.gz', 'wb') as f:
        for chunk in response.iter_content(1024 * 1024):
            f.write(chunk)
    # The round-tripped file must be byte-for-byte the same size
    n = os.path.getsize('tmp.nii.gz')
    m = os.path.getsize(file_name)
    assert n == m
    # IMPROVED: os.remove is portable; os.system('rm -f ...') shells out and
    # only works on Unix-like systems.
    os.remove('tmp.nii.gz')
def test_users():
    """Verify that listing users returns at least the four seeded accounts."""
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token = response.json()['token']
    response = requests.get(uri('auth', '/users'), headers=token_header(token))
    assert response.status_code == 200
    users = response.json()
    assert len(users) >= 4
def test_create_update_and_delete_group():
    """Exercise the group lifecycle: create, rename, membership, delete."""
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token = response.json()['token']
    headers = token_header(token)

    # Create a group with a random name
    group = {'name': generate_string()}
    response = requests.post(uri('auth', '/user-groups'),
                             json=group, headers=headers)
    assert response.status_code == 201
    user_group_id = response.json()['id']
    group_uri = uri('auth', '/user-groups/{}'.format(user_group_id))

    # Rename the group and confirm the new name is persisted
    group['name'] = generate_string()
    response = requests.put(group_uri, json=group, headers=headers)
    assert response.status_code == 200
    response = requests.get(group_uri, headers=headers)
    assert response.status_code == 200
    assert response.json()['name'] == group['name']

    # Add 'ralph' to the group and verify the membership listing
    response = requests.get(uri('auth', '/users?username=ralph'),
                            headers=headers)
    assert response.status_code == 200
    user_id = response.json()[0]['id']
    member_uri = uri('auth', '/user-groups/{}/users/{}'.format(
        user_group_id, user_id))
    members_uri = uri('auth', '/user-groups/{}/users'.format(user_group_id))
    response = requests.put(member_uri, headers=headers)
    assert response.status_code == 200
    response = requests.get(members_uri, headers=headers)
    assert response.status_code == 200
    members = response.json()
    assert len(members) == 1
    assert members[0]['username'] == 'ralph'

    # Remove the user again and verify the group is empty
    response = requests.delete(member_uri, headers=headers)
    assert response.status_code == 200
    response = requests.get(members_uri, headers=headers)
    assert response.status_code == 200
    assert len(response.json()) == 0

    # Delete the group and verify it no longer exists
    response = requests.delete(group_uri, headers=headers)
    assert response.status_code == 204
    response = requests.get(group_uri, headers=headers)
    assert response.status_code == 404
def download_file(storage_id, target_dir, token, extension=None):
    """Download a stored file into target_dir and return its local path.

    The file is saved under its storage ID; if `extension` is given it is
    appended (a leading dot is added when missing).

    NOTE(review): the response status is not checked, so on an HTTP error
    the error body would be written to disk — confirm callers rely on the
    returned path only after a successful upload.
    """
    response = requests.get(uri('storage', '/downloads/{}'.format(storage_id)),
                            headers=token_header(token))
    target_path = os.path.join(target_dir, storage_id)
    if extension:
        suffix = extension if extension.startswith('.') \
            else '.{}'.format(extension)
        target_path = '{}{}'.format(target_path, suffix)
    # Stream the body to disk in 1 MiB chunks
    with open(target_path, 'wb') as out:
        for chunk in response.iter_content(1024 * 1024):
            out.write(chunk)
    return target_path
def test_check_token():
    """Tokens are distinct per user; checks succeed for valid, 403 for bogus."""
    # Obtain tokens for two different users
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token1 = response.json()['token']
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('quentin', 'secret'))
    assert response.status_code == 201
    token2 = response.json()['token']
    assert token1 != token2
    # A valid token can be checked while authenticated with another token
    response = requests.post(uri('auth', '/token-checks'),
                             json={'token': token2},
                             headers=token_header(token1))
    assert response.status_code == 201
    # A made-up token is rejected outright
    response = requests.post(uri('auth', '/token-checks'),
                             headers=token_header('1234'))
    assert response.status_code == 403
def upload_file(file_name, file_type_id, scan_type_id, repository_id, token):
    """Upload a file to the storage service in roughly 10 chunks.

    Each chunk is POSTed with a Content-Range-style header; the server
    responds 202 for intermediate chunks (returning a session ID that must
    accompany the following chunks) and 201 for the final one.

    :param file_name: Local path of the file to upload.
    :param file_type_id: Storage-service file type ID (X-File-Type header).
    :param scan_type_id: Storage-service scan type ID (X-Scan-Type header).
    :param repository_id: Target repository ID (X-Repository-ID header).
    :param token: Auth token for the upload requests.
    :return: (file_id, storage_id) tuple reported by the final 201 response.
    :raises RuntimeError: if an intermediate chunk is not acknowledged
        with 202.
    """
    session_id = None
    file_id = None
    storage_id = None
    with open(file_name, 'rb') as f:
        offset = 0
        total_size = os.path.getsize(file_name)
        # BUG FIX: int(total_size / 10) is 0 for files smaller than 10 bytes,
        # which would ask read_chunks for zero-byte reads and never make
        # progress. Clamp the chunk size to at least 1 byte.
        chunk_size = max(1, total_size // 10)
        for chunk in read_chunks(f, chunk_size):
            content_length = len(chunk)
            content_range = 'bytes {}-{}/{}'.format(
                offset, offset + len(chunk) - 1, total_size)
            headers = token_header(token)
            headers.update({
                'Content-Length': '{}'.format(content_length),
                'Content-Type': 'application/octet-stream',
                'Content-Disposition':
                    'attachment; filename={}'.format(file_name),
                'X-Content-Range': content_range,
                'X-Session-ID': session_id,
                'X-File-Type': '{}'.format(file_type_id),
                'X-Scan-Type': '{}'.format(scan_type_id),
                'X-Repository-ID': '{}'.format(repository_id)
            })
            response = requests.post(uri('storage', '/uploads'),
                                     headers=headers, data=chunk)
            if response.status_code == 201:
                # Final chunk accepted: the server reports the new file
                file_id = response.json()['id']
                storage_id = response.json()['storage_id']
                break
            if response.status_code != 202:
                raise RuntimeError('Response not 202 ({})'.format(
                    response.status_code))
            # Intermediate chunk: carry the session ID into the next request
            session_id = response.headers['X-Session-ID']
            offset += len(chunk)
    return file_id, storage_id
def get_storage_id_for_file(repository_id, file_id, token):
    """Resolve a repository file ID to its underlying storage ID."""
    file_uri = uri('storage', '/repositories/{}/files/{}'.format(
        repository_id, file_id))
    response = requests.get(file_uri, headers=token_header(token))
    return response.json()['storage_id']
def test_train_classifier():
    """Train an SVM on uploaded CSV features, then predict one subject.

    Skipped (returns early) when the DATA_DIR environment variable is unset.
    """
    if os.getenv('DATA_DIR', None) is None:
        return

    # Get access token
    response = requests.post(uri('auth', '/tokens'),
                             headers=login_header('ralph', 'secret'))
    assert response.status_code == 201
    token = response.json()['token']
    headers = token_header(token)

    # Create storage repository
    name = 'repository-{}'.format(generate_string(8))
    response = requests.post(uri('storage', '/repositories'),
                             headers=headers, json={'name': name})
    assert response.status_code == 201
    repository_id = response.json()['id']

    # Get CSV file type ID
    response = requests.get(uri('storage', '/file-types?name=csv'),
                            headers=headers)
    assert response.status_code == 200
    file_type_id = response.json()[0]['id']

    # Get scan type ID
    response = requests.get(uri('storage', '/scan-types?name=none'),
                            headers=headers)
    assert response.status_code == 200
    scan_type_id = response.json()[0]['id']

    # Load the feature table and keep the diagnostic labels
    file_path = os.path.join(os.getenv('DATA_DIR'), 'data.csv')
    features = pd.read_csv(file_path, index_col='MRid')
    subject_labels = list(features['Diagnosis'])

    # Upload CSV file with brain features
    file_id, _ = upload_file(
        file_path, file_type_id, scan_type_id, repository_id, token)
    assert file_id

    # Train a classifier on the uploaded CSV. 'subject_labels' lets the
    # service pre-compute train/test indices for the parallel CV folds.
    response = requests.post(uri('compute', '/tasks'), headers=headers, json={
        'pipeline_name': 'svm_train',
        'params': {
            'repository_id': repository_id,
            'file_id': file_id,
            'subject_labels': subject_labels,
            'nr_folds': 2,
            'index_column': 'MRid',
            'target_column': 'Diagnosis',
            'kernel': 'rbf',
        }
    })
    assert response.status_code == 201
    task_id = response.json()['id']

    # Poll until the training task reports SUCCESS with a non-null result
    classifier_id = 0
    while True:
        response = requests.get(uri('compute', '/tasks/{}'.format(task_id)),
                                headers=headers)
        assert response.status_code == 200
        status = response.json()['status']
        assert status in ('PENDING', 'SUCCESS')
        result = response.json()['result']
        sys.stdout.write('.')
        sys.stdout.flush()
        if status == 'SUCCESS' and result is not None:
            classifier_id = result['classifier_id']
            break
        time.sleep(2)

    # Drop the target column and take the first subject for prediction
    features.drop('Diagnosis', axis=1, inplace=True)
    subject_data = list(features.iloc[0])
    subject_label = subject_labels[0]

    # Ask the trained classifier to predict that subject's label
    response = requests.post(uri('compute', '/tasks'), headers=headers, json={
        'pipeline_name': 'svm_predict',
        'params': {
            'classifier_id': classifier_id,
            'subjects': [
                subject_data,
            ],
        }
    })
    assert response.status_code == 201
    task_id = response.json()['id']

    # Poll the prediction task and check the predicted label matches
    while True:
        response = requests.get(uri('compute', '/tasks/{}'.format(task_id)),
                                headers=headers)
        assert response.status_code == 200
        status = response.json()['status']
        assert status in ('PENDING', 'SUCCESS')
        result = response.json()['result']
        sys.stdout.write('.')
        sys.stdout.flush()
        if status == 'SUCCESS' and result is not None:
            assert subject_label == result['predicted_labels'][0]
            break
        time.sleep(2)