def test_Evaluation():
    """Test the construction and accessors of Evaluation objects."""

    # Status can only be one of ['OPEN', 'PLANNED', 'CLOSED', 'COMPLETED']
    assert_raises(ValueError, Evaluation, name='foo', description='bar', status='BAH')
    assert_raises(ValueError, Evaluation, name='foo', description='bar', status='OPEN', contentSource='a')

    # Assert that the values can be read back through both dict-style and attribute-style access
    ev = Evaluation(name='foobar2', description='bar', status='OPEN', contentSource='syn1234')
    assert ev['name'] == ev.name
    assert ev['description'] == ev.description
    assert ev['status'] == ev.status
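
# ----------------------------------------------------------------------------------------
# The tests in this module rely on setup that is outside this excerpt: nose assertion
# helpers, the synapseclient classes they exercise, a logged-in `syn` client, a scratch
# `project`, an `other_user` credentials dict, and helpers such as `schedule_for_cleanup`,
# `run`, `parse`, `utils.make_bogus_data_file`, and `create_project`. The block below is a
# minimal sketch of what that setup might look like; every fixture value in it is an
# assumption for illustration, not the project's actual test scaffolding.
# ----------------------------------------------------------------------------------------
import filecmp
import os
import random
import re
import sys
import tempfile
import time
import uuid

from nose.tools import (assert_equals, assert_false, assert_in,
                        assert_raises, assert_true)

import synapseclient.client as client
from synapseclient import Evaluation, File, Project
from synapseclient.annotations import (from_submission_status_annotations,
                                        set_privacy,
                                        to_submission_status_annotations)
from synapseclient.exceptions import SynapseHTTPError

# Assumed module-level fixtures: a shared, logged-in client, a scratch project, and
# optional secondary credentials normally read from the [test-authentication] section
# of the Synapse config file.
syn = client.Synapse(skip_checks=True)
syn.login(silent=True)
project = syn.store(Project(name='Evaluation test project %s' % uuid.uuid4()))
other_user = {'username': None, 'password': None}

_cleanup_items = []


def schedule_for_cleanup(item):
    """Assumed helper: remember a file path or entity so a teardown hook (not shown) can delete it."""
    _cleanup_items.append(item)
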
def test_evaluations():
    # Create an Evaluation
    name = 'Test Evaluation %s' % str(uuid.uuid4())
    ev = Evaluation(name=name, description='Evaluation for testing',
                    contentSource=project['id'], status='CLOSED')
    ev = syn.store(ev)

    try:
        # -- Get the Evaluation by name
        evalNamed = syn.getEvaluationByName(name)
        assert_equals(ev['contentSource'], evalNamed['contentSource'])
        assert_equals(ev['createdOn'], evalNamed['createdOn'])
        assert_equals(ev['description'], evalNamed['description'])
        assert_equals(ev['etag'], evalNamed['etag'])
        assert_equals(ev['id'], evalNamed['id'])
        assert_equals(ev['name'], evalNamed['name'])
        assert_equals(ev['ownerId'], evalNamed['ownerId'])
        assert_equals(ev['status'], evalNamed['status'])

        # -- Get the Evaluation by project
        evalProj = syn.getEvaluationByContentSource(project)
        evalProj = next(evalProj)
        assert_equals(ev['contentSource'], evalProj['contentSource'])
        assert_equals(ev['createdOn'], evalProj['createdOn'])
        assert_equals(ev['description'], evalProj['description'])
        assert_equals(ev['etag'], evalProj['etag'])
        assert_equals(ev['id'], evalProj['id'])
        assert_equals(ev['name'], evalProj['name'])
        assert_equals(ev['ownerId'], evalProj['ownerId'])
        assert_equals(ev['status'], evalProj['status'])

        # Update the Evaluation
        ev['status'] = 'OPEN'
        ev = syn.store(ev, createOrUpdate=True)
        assert_equals(ev.status, 'OPEN')

        # Add the current user as a participant
        myOwnerId = int(syn.getUserProfile()['ownerId'])
        syn._allowParticipation(ev, myOwnerId)

        # AUTHENTICATED_USERS = 273948
        # PUBLIC = 273949
        syn.setPermissions(ev, 273948, accessType=['READ'])
        syn.setPermissions(ev, 273949, accessType=['READ'])

        # Test getPermissions
        permissions = syn.getPermissions(ev, 273949)
        assert_equals(['READ'], permissions)

        permissions = syn.getPermissions(ev, syn.getUserProfile()['ownerId'])
        for p in ['READ', 'CREATE', 'DELETE', 'UPDATE',
                  'CHANGE_PERMISSIONS', 'READ_PRIVATE_SUBMISSION']:
            assert_in(p, permissions)

        # Test getSubmissions with no Submissions (SYNR-453)
        submissions = syn.getSubmissions(ev)
        assert_equals(len(list(submissions)), 0)

        # Increase this to fully test paging by getEvaluationSubmissions
        # (not to be less than 2)
        num_of_submissions = 2

        # Create a bunch of Entities and submit them for scoring
        for i in range(num_of_submissions):
            with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
                filename = f.name
                f.write(str(random.gauss(0, 1)) + '\n')
            f = File(filename, parentId=project.id, name='entry-%02d' % i,
                     description='An entry for testing evaluation')
            entity = syn.store(f)
            syn.submit(ev, entity, name='Submission %02d' % i, submitterAlias='My Team')

        # Score the submissions
        submissions = syn.getSubmissions(ev, limit=num_of_submissions - 1)
        for submission in submissions:
            assert_true(re.match(r'Submission \d+', submission['name']))
            status = syn.getSubmissionStatus(submission)
            status.score = random.random()
            if submission['name'] == 'Submission 01':
                status.status = 'INVALID'
                status.report = 'Uh-oh, something went wrong!'
            else:
                status.status = 'SCORED'
                status.report = 'a fabulous effort!'
            syn.store(status)

        # Annotate the submissions
        bogosity = {}
        submissions = syn.getSubmissions(ev)
        b = 123
        for submission, status in syn.getSubmissionBundles(ev):
            bogosity[submission.id] = b
            a = dict(foo='bar', bogosity=b)
            b += 123
            status['annotations'] = to_submission_status_annotations(a)
            set_privacy(status['annotations'], key='bogosity', is_private=False)
            syn.store(status)

        # Test that the annotations stuck
        for submission, status in syn.getSubmissionBundles(ev):
            a = from_submission_status_annotations(status.annotations)
            assert_equals(a['foo'], 'bar')
            assert_equals(a['bogosity'], bogosity[submission.id])
            for kvp in status.annotations['longAnnos']:
                if kvp['key'] == 'bogosity':
                    assert_false(kvp['isPrivate'])

        # Test query by submission annotations.
        # These queries run against an eventually consistent index table which is
        # populated by an asynchronous worker. Thus, the queries may remain out
        # of sync for some unbounded, but assumed to be short, time.
        attempts = 2
        while attempts > 0:
            try:
                results = syn.restGET(
                    "/evaluation/submission/query?query=SELECT+*+FROM+evaluation_%s" % ev.id)
                assert_equals(len(results['rows']), num_of_submissions + 1)

                results = syn.restGET(
                    "/evaluation/submission/query?query=SELECT+*+FROM+evaluation_%s where bogosity > 200"
                    % ev.id)
                assert_equals(len(results['rows']), num_of_submissions)
            except AssertionError as ex1:
                attempts -= 1
                time.sleep(2)
            else:
                attempts = 0

        # Test that we can retrieve submissions with a specific status
        invalid_submissions = list(syn.getSubmissions(ev, status='INVALID'))
        assert_equals(len(invalid_submissions), 1, len(invalid_submissions))
        assert_equals(invalid_submissions[0]['name'], 'Submission 01')

    finally:
        # Clean up
        syn.delete(ev)
        if 'testSyn' in locals():
            if 'other_project' in locals():
                # Clean up, since the current user can't access this project.
                # This also removes references to the submitted object :)
                testSyn.delete(other_project)
            if 'team' in locals():
                # Remove the team
                testSyn.delete(team)

    # Just deleted it. Shouldn't be able to get it.
    assert_raises(SynapseHTTPError, syn.getEvaluation, ev)
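
# The manual attempts/while loop above retries assertions against the eventually
# consistent submission-annotation index. A hypothetical helper like the one below
# captures the same pattern; unlike the loop above, it re-raises the AssertionError
# if the last attempt still fails, so a persistent inconsistency surfaces as a test
# failure. This is an illustrative sketch, not part of the client or of these tests.
def retry_eventually_consistent(check, attempts=2, wait=2):
    """Call `check()` until it stops raising AssertionError or the attempts run out."""
    for attempt in range(attempts):
        try:
            return check()
        except AssertionError:
            if attempt == attempts - 1:
                raise
            time.sleep(wait)

# Example (hypothetical) usage with the first query assertion above:
#   retry_eventually_consistent(lambda: assert_equals(
#       len(syn.restGET("/evaluation/submission/query?query=SELECT+*+FROM+evaluation_%s"
#                       % ev.id)['rows']),
#       num_of_submissions + 1))
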
def test_command_line_store_and_submit():
    # Create a Project
    output = run('synapse', '--skip-checks', 'store',
                 '--name', str(uuid.uuid4()),
                 '--description', 'test of store command',
                 '--type', 'Project')
    project_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)
    schedule_for_cleanup(project_id)

    # Create and upload a file
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    output = run('synapse', '--skip-checks', 'store',
                 '--description', 'Bogus data to test file upload',
                 '--parentid', project_id,
                 '--file', filename)
    file_entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)

    # Verify that we stored the file in Synapse
    f1 = syn.get(file_entity_id)
    fh = syn._getFileHandle(f1.dataFileHandleId)
    assert_equals(fh['concreteType'], 'org.sagebionetworks.repo.model.file.S3FileHandle')

    # Test that the entity is named after the file it contains
    assert_equals(f1.name, os.path.basename(filename))

    # Create an Evaluation to submit to
    eval = Evaluation(name=str(uuid.uuid4()), contentSource=project_id)
    eval = syn.store(eval)
    schedule_for_cleanup(eval)

    # Submit a bogus file
    output = run('synapse', '--skip-checks', 'submit',
                 '--evaluation', eval.id,
                 '--name', 'Some random name',
                 '--entity', file_entity_id)
    submission_id = parse(r'Submitted \(id: (\d+)\) entity:\s+', output)

    # Test different command line options for submitting to an evaluation:
    # submitting to an evaluation by evaluation ID
    output = run('synapse', '--skip-checks', 'submit',
                 '--evalID', eval.id,
                 '--name', 'Some random name',
                 '--alias', 'My Team',
                 '--entity', file_entity_id)
    submission_id = parse(r'Submitted \(id: (\d+)\) entity:\s+', output)

    # Update the file
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    output = run('synapse', '--skip-checks', 'store',
                 '--id', file_entity_id,
                 '--file', filename)
    updated_entity_id = parse(r'Updated entity:\s+(syn\d+)', output)
    schedule_for_cleanup(updated_entity_id)

    # Submit an updated bogus file, this time by evaluation name
    output = run('synapse', '--skip-checks', 'submit',
                 '--evaluationName', eval.name,
                 '--entity', file_entity_id)

    # Tests shouldn't have external dependencies, but here it's required
    ducky_url = 'https://www.synapse.org/Portal/clear.cache.gif'

    # Test external file handle
    output = run('synapse', '--skip-checks', 'store',
                 '--name', 'Rubber Ducky',
                 '--description', 'I like rubber duckies',
                 '--parentid', project_id,
                 '--file', ducky_url)
    external_entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)
    schedule_for_cleanup(external_entity_id)

    # Verify that we created an external file handle
    f2 = syn.get(external_entity_id)
    fh = syn._getFileHandle(f2.dataFileHandleId)
    assert_equals(fh['concreteType'], 'org.sagebionetworks.repo.model.file.ExternalFileHandle')

    # Submit an external file to an evaluation and use provenance
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    run('synapse', '--skip-checks', 'submit',
        '--evalID', eval.id,
        '--file', filename,
        '--parent', project_id,
        '--used', external_entity_id,
        '--executed', repo_url)

    # Delete the project
    run('synapse', '--skip-checks', 'delete', project_id)
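
# `run` and `parse` above are command-line test helpers defined outside this excerpt.
# One plausible shape for them is sketched below: `run` invokes the `synapse` CLI with
# the given arguments and returns its captured stdout, and `parse` pulls a capture
# group out of that output. Everything here is an assumption shown only to make the
# test readable on its own; the project's actual helpers may instead invoke the
# command line client in-process.
import re
import subprocess


def run(*command):
    """Run the synapse CLI with the given arguments and return its stdout as text."""
    return subprocess.run(command, check=True, capture_output=True, text=True).stdout


def parse(pattern, output):
    """Return the first capture group of `pattern` found in `output`."""
    match = re.search(pattern, output)
    assert match is not None, "pattern %r not found in output:\n%s" % (pattern, output)
    return match.group(1)
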
def test_evaluations():
    # Create an Evaluation
    name = 'Test Evaluation %s' % str(uuid.uuid4())
    ev = Evaluation(name=name, description='Evaluation for testing',
                    contentSource=project['id'], status='CLOSED')
    ev = syn.store(ev)

    try:
        # -- Get the Evaluation by name
        evalNamed = syn.getEvaluationByName(name)
        assert ev['contentSource'] == evalNamed['contentSource']
        assert ev['createdOn'] == evalNamed['createdOn']
        assert ev['description'] == evalNamed['description']
        assert ev['etag'] == evalNamed['etag']
        assert ev['id'] == evalNamed['id']
        assert ev['name'] == evalNamed['name']
        assert ev['ownerId'] == evalNamed['ownerId']
        assert ev['status'] == evalNamed['status']

        # -- Get the Evaluation by project
        evalProj = syn.getEvaluationByContentSource(project)
        evalProj = next(evalProj)
        assert ev['contentSource'] == evalProj['contentSource']
        assert ev['createdOn'] == evalProj['createdOn']
        assert ev['description'] == evalProj['description']
        assert ev['etag'] == evalProj['etag']
        assert ev['id'] == evalProj['id']
        assert ev['name'] == evalProj['name']
        assert ev['ownerId'] == evalProj['ownerId']
        assert ev['status'] == evalProj['status']

        # Update the Evaluation
        ev['status'] = 'OPEN'
        ev = syn.store(ev, createOrUpdate=True)
        assert ev.status == 'OPEN'

        # Add the current user as a participant
        myOwnerId = int(syn.getUserProfile()['ownerId'])
        syn._allowParticipation(ev, myOwnerId)

        # AUTHENTICATED_USERS = 273948
        # PUBLIC = 273949
        syn.setPermissions(ev, 273948, accessType=['READ'])
        syn.setPermissions(ev, 273949, accessType=['READ'])

        # Test getPermissions
        permissions = syn.getPermissions(ev, 273949)
        assert ['READ'] == permissions

        permissions = syn.getPermissions(ev, syn.getUserProfile()['ownerId'])
        assert all(p in permissions
                   for p in ['READ', 'CREATE', 'DELETE', 'UPDATE',
                             'CHANGE_PERMISSIONS', 'READ_PRIVATE_SUBMISSION'])

        # Test getSubmissions with no Submissions (SYNR-453)
        submissions = syn.getSubmissions(ev)
        assert len(list(submissions)) == 0

        # -- Get a Submission attachment belonging to another user (SYNR-541) --
        # See if the configuration contains test authentication
        if other_user['username']:
            print("Testing SYNR-541")

            # Login as the test user
            testSyn = client.Synapse(skip_checks=True)
            testSyn.login(email=other_user['username'], password=other_user['password'])
            testOwnerId = int(testSyn.getUserProfile()['ownerId'])

            # Make a project
            other_project = Project(name=str(uuid.uuid4()))
            other_project = testSyn.createEntity(other_project)

            # Give the test user permission to read and join the evaluation
            syn._allowParticipation(ev, testOwnerId)

            # Make a file to submit
            with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
                filename = f.name
                f.write(str(random.gauss(0, 1)) + '\n')
            f = File(filename, parentId=other_project.id,
                     name='Submission 999',
                     description="Haha! I'm inaccessible...")
            entity = testSyn.store(f)

            # Test submission by evaluation ID
            submission = testSyn.submit(ev.id, entity, submitterAlias="My Nickname")

            # Mess up the cached file so that syn._getWithEntityBundle must download again
            os.utime(filename, (0, 0))

            # Grab the Submission as the original user
            fetched = syn.getSubmission(submission['id'])
            assert os.path.exists(fetched['filePath'])

            # Make sure the fetched file is the same as the original (PLFM-2666)
            assert filecmp.cmp(filename, fetched['filePath'])
        else:
            print('Skipping test for SYNR-541: No [test-authentication] in %s'
                  % client.CONFIG_FILE)

        # Increase this to fully test paging by getEvaluationSubmissions
        # (not to be less than 2)
        num_of_submissions = 2

        # Create a bunch of Entities and submit them for scoring
        print("Creating Submissions")
        for i in range(num_of_submissions):
            with tempfile.NamedTemporaryFile(mode="w", delete=False) as f:
                filename = f.name
                f.write(str(random.gauss(0, 1)) + '\n')
            f = File(filename, parentId=project.id, name='entry-%02d' % i,
                     description='An entry for testing evaluation')
            entity = syn.store(f)
            syn.submit(ev, entity, name='Submission %02d' % i, submitterAlias='My Team')

        # Score the submissions
        submissions = syn.getSubmissions(ev, limit=num_of_submissions - 1)
        print("Scoring Submissions")
        for submission in submissions:
            assert re.match(r'Submission \d+', submission['name'])
            status = syn.getSubmissionStatus(submission)
            status.score = random.random()
            if submission['name'] == 'Submission 01':
                status.status = 'INVALID'
                status.report = 'Uh-oh, something went wrong!'
            else:
                status.status = 'SCORED'
                status.report = 'a fabulous effort!'
            syn.store(status)

        # Annotate the submissions
        print("Annotating Submissions")
        bogosity = {}
        submissions = syn.getSubmissions(ev)
        b = 123
        for submission, status in syn.getSubmissionBundles(ev):
            bogosity[submission.id] = b
            a = dict(foo='bar', bogosity=b)
            b += 123
            status['annotations'] = to_submission_status_annotations(a)
            set_privacy(status['annotations'], key='bogosity', is_private=False)
            syn.store(status)

        # Test that the annotations stuck
        for submission, status in syn.getSubmissionBundles(ev):
            a = from_submission_status_annotations(status.annotations)
            assert a['foo'] == 'bar'
            assert a['bogosity'] == bogosity[submission.id]
            for kvp in status.annotations['longAnnos']:
                if kvp['key'] == 'bogosity':
                    assert not kvp['isPrivate']

        # Test query by submission annotations.
        # These queries run against an eventually consistent index table which is
        # populated by an asynchronous worker. Thus, the queries may remain out
        # of sync for some unbounded, but assumed to be short, time.
        attempts = 2
        while attempts > 0:
            try:
                print("Querying for submissions")
                results = syn.restGET(
                    "/evaluation/submission/query?query=SELECT+*+FROM+evaluation_%s" % ev.id)
                print(results)
                assert len(results['rows']) == num_of_submissions + 1

                results = syn.restGET(
                    "/evaluation/submission/query?query=SELECT+*+FROM+evaluation_%s where bogosity > 200"
                    % ev.id)
                print(results)
                assert len(results['rows']) == num_of_submissions
            except AssertionError as ex1:
                print("failed query: ", ex1)
                attempts -= 1
                if attempts > 0:
                    print("retrying...")
                    time.sleep(2)
            else:
                attempts = 0

        # Test that we can retrieve submissions with a specific status
        invalid_submissions = list(syn.getSubmissions(ev, status='INVALID'))
        assert len(invalid_submissions) == 1, len(invalid_submissions)
        assert invalid_submissions[0]['name'] == 'Submission 01'

    finally:
        # Clean up
        syn.delete(ev)
        if 'testSyn' in locals():
            if 'other_project' in locals():
                # Clean up, since the current user can't access this project.
                # This also removes references to the submitted object :)
                testSyn.delete(other_project)
            if 'team' in locals():
                # Remove the team
                testSyn.delete(team)

    # Just deleted it. Shouldn't be able to get it.
    assert_raises(SynapseHTTPError, syn.getEvaluation, ev)
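
# The annotation helpers used above translate between a plain dict and the typed
# annotation structure ('stringAnnos', 'longAnnos', 'doubleAnnos') stored on a
# SubmissionStatus. The minimal round-trip below is built only from the calls already
# used in the test; the concrete values are illustrative and not part of the suite.
def _submission_annotation_round_trip_sketch():
    raw = dict(foo='bar', bogosity=123)
    annos = to_submission_status_annotations(raw)           # dict -> typed annotations
    set_privacy(annos, key='bogosity', is_private=False)    # make one key publicly readable
    recovered = from_submission_status_annotations(annos)   # typed annotations -> dict
    assert recovered['foo'] == 'bar'
    assert recovered['bogosity'] == 123
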
def test_evaluations():
    # Create a new project
    project = create_project()

    name = 'Test Evaluation %s' % (str(uuid.uuid4()),)

    try:
        # Create the Evaluation
        ev = Evaluation(name=name, description='Evaluation for testing',
                        contentSource=project['id'], status='CLOSED')
        ev = syn.store(ev)

        # Update the Evaluation
        ev['status'] = 'OPEN'
        ev = syn.store(ev, createOrUpdate=True)
        assert ev.status == 'OPEN'

        # Add the current user as a participant
        user = syn.getUserProfile()
        syn.addEvaluationParticipant(ev, user['ownerId'])

        # Test getSubmissions with no submissions (SYNR-453)
        submissions = syn.getSubmissions(ev)
        assert len(list(submissions)) == 0

        # Increase this to fully test paging by getEvaluationSubmissions
        num_of_submissions = 3

        # Create a bunch of entities and submit them for evaluation
        sys.stdout.write('\ncreating evaluation submissions')
        for i in range(num_of_submissions):
            try:
                (fd, filename) = tempfile.mkstemp()
                with os.fdopen(fd, 'w') as f:
                    f.write(str(random.gauss(0, 1)))
                    f.write('\n')
                f = File(filename, parentId=project.id, name='entry-%02d' % i,
                         description='An entry for testing evaluation')
                entity = syn.store(f)
                syn.submit(ev, entity)
            finally:
                os.remove(filename)
            sys.stdout.write('.')
            sys.stdout.flush()

        # Score the submissions
        submissions = syn.getSubmissions(ev)
        sys.stdout.write('\nscoring submissions')
        for submission in submissions:
            status = syn.getSubmissionStatus(submission)
            status.score = random.random()
            status.status = 'SCORED'
            status.report = 'a fabulous effort!'
            syn.store(status)
            sys.stdout.write('.')
            sys.stdout.flush()
        sys.stdout.write('\n')

    finally:
        syn.delete(ev)

    # Make sure it's deleted
    try:
        ev = syn.getEvaluation(ev)
    except Exception as e:
        print(e)
        assert e.response.status_code == 404
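
# `create_project()` called at the top of the test above is a helper defined outside
# this excerpt. A minimal sketch of what it presumably does with the shared `syn`
# client: store a uniquely named Project and return it (cleanup handling is assumed
# to happen elsewhere).
def create_project():
    return syn.store(Project(name='Evaluation test project %s' % uuid.uuid4()))
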