def test_description_updated_sample_must_be_exported(requireMocking):
    """Create 10 tracked samples for one job and verify the description
    endpoint returns all of them on a single page.

    NOTE(review): despite the test name, nothing here asserts an
    'exported' state — the created samples stay 'scheduled'. Confirm
    the intent or rename the test.
    """
    for x in range(0, 10):
        # register each sample with the job in state 'scheduled'
        data = json.dumps({
            "job": "1234567",
            "sample": "abc_{}".format(x),
            "state": "scheduled"
        })
        tracking.create({'body': data}, {})

    # page size 25 comfortably exceeds the 10 samples, so a single
    # page must contain every sample
    result = tracking.description({
        "pathParameters": {
            "job": "1234567",
            "psize": 25,
        }
    }, {})

    assert result is not None
    assert result['statusCode'] == 200
    assert len(json.loads(result['body'])) == 10
def test_description(requireMocking):
    """Paginate a 40-sample job description as two pages of 25 and 15."""
    for index in range(40):
        # register every sample with the job in state 'scheduled'
        payload = json.dumps({
            "job": "1234567",
            "sample": "abc_{}".format(index),
            "state": "scheduled"
        })
        tracking.create({'body': payload}, {})

    # first page: a full page of 25 samples
    first_page = tracking.description({
        "pathParameters": {
            "job": "1234567",
            "psize": 25,
        }
    }, {})

    assert first_page is not None
    assert first_page['statusCode'] == 200
    samples = json.loads(first_page['body'])
    assert len(samples) == 25

    # second page: resume after the last key of the first page and
    # receive the remaining 15 samples
    second_page = tracking.description({
        "pathParameters": {
            "job": "1234567",
            "psize": 25,
            "last_key": samples[-1]['id']
        }
    }, {})

    assert second_page is not None
    assert second_page['statusCode'] == 200
    assert len(json.loads(second_page['body'])) == 15
def test_status(requireMocking):
    """End-to-end check of the job status endpoint across state changes.

    Walks a 100-sample job from 'scheduled' through a partial export to
    all-samples-finished, asserting the aggregated job state reported by
    tracking.status at each step, and finally forces the job state to
    FAILED via the status endpoint's request body.
    """
    for x in range(0, 100):
        # upload tracking data for each of the 100 samples
        data = json.dumps({
            "job": "123456",
            "sample": "abc_{}".format(x),
            "state": "scheduled"
        })

        # store in stasis or we doomed!!! (stasis must know the sample
        # before job-state aggregation can see it)
        response = create.create({'body': json.dumps({'sample': 'abc_{}'.format(x), 'status': 'scheduled'})}, {})

        # store in the job state table
        result = tracking.create({'body': data}, {})
        assert result['statusCode'] == 200

    set_job_state(job="123456", method="test", profile="test", state=SCHEDULED)

    result = tracking.status({
        "pathParameters": {
            "job": "123456",
        }
    }, {})

    assert result is not None
    assert result['statusCode'] == 200
    assert json.loads(result['body'])['job_state'] == 'scheduled'

    for x in range(0, 10):
        # pretend stasis has now exported the data for 10 of the samples
        response = create.create({'body': json.dumps({'sample': 'abc_{}'.format(x), 'status': 'exported'})}, {})

    calculate_job_state("123456")

    result = tracking.status({
        "pathParameters": {
            "job": "123456",
        }
    }, {})

    # since not yet aggregated, the job should be in state processing
    assert json.loads(result['body'])['job_state'] == 'processing'

    for x in range(0, 100):
        # pretend all samples are finished
        response = create.create({'body': json.dumps({'sample': 'abc_{}'.format(x), 'status': "finished"})}, {})

    result = tracking.status({
        "pathParameters": {
            "job": "123456",
        }
    }, {})

    monitor_jobs({}, {})

    # NOTE(review): `result` was fetched BEFORE monitor_jobs ran, so this
    # asserts the pre-monitoring job state — presumably intentional; confirm.
    assert json.loads(result['body'])['job_state'] == PROCESSING

    # a request body on the status endpoint forces the job state directly
    result = tracking.status({
        "pathParameters": {
            "job": "123456",
        },
        "body": json.dumps({
            'job_state': FAILED
        })
    }, {})

    assert json.loads(result['body'])['job_state'] == FAILED
def test_create_and_get(requireMocking):
    """Round-trip a tracking record: create, fetch, update, fetch again."""
    # create the initial record in state 'scheduled'
    created = tracking.create({'body': json.dumps({
        "job": "12345",
        "sample": "abc",
        "state": "scheduled"
    })}, {})

    # ensure status is correct
    assert created is not None
    assert created['statusCode'] == 200

    fetched = tracking.get({
        "pathParameters": {
            "sample": "abc",
            "job": "12345"
        }
    }, {})

    assert fetched is not None
    assert fetched['statusCode'] == 200
    assert 'body' in fetched

    record = json.loads(fetched['body'])

    # the stored record carries all fields, a composite id, and the
    # initial state
    for key in ('job', 'sample', 'state', 'id'):
        assert key in record
    assert record['id'] == '12345_abc'
    assert record['state'] == 'scheduled'

    # re-creating the same job/sample with a new state acts as an update
    tracking.create({'body': json.dumps({
        "job": "12345",
        "sample": "abc",
        "state": "processing"
    })}, {})
    calculate_job_state("12345")

    refetched = tracking.get({
        "pathParameters": {
            "sample": "abc",
            "job": "12345"
        }
    }, {})

    assert refetched is not None
    assert refetched['statusCode'] == 200
    assert 'body' in refetched

    record = json.loads(refetched['body'])
    assert record['state'] == 'processing'
    print(record)
    # the superseded state is retained in the record's history
    assert 'past_states' in record
    assert 'scheduled' in record['past_states']
def test_job_is_in_state_exported_even_with_a_failed_sample(requireMocking, mocked_10_sample_job):
    """A job whose samples are all EXPORTED except one FAILED sample must
    still aggregate to the EXPORTED job state.

    Fixes over the previous version: sample abc_9's fileHandle reused
    ``.format(8)`` by copy-paste, and sample abc_4 used the string
    literal "exported" instead of the EXPORTED constant. Only
    ``sample`` and ``value`` are consumed below, so the fixture list is
    generated programmatically instead of hand-written.
    """
    for i in range(0, 10):
        tracking.create({'body': json.dumps(
            {
                "job": "12345",
                "sample": "abc_{}".format(i),
                "state": "scheduled"
            }
        )}, {})
        assert load_job_samples_with_states("12345")['abc_{}'.format(i)] == "scheduled"

    # dummy stasis data which need to be in the system for this test to
    # pass: every sample EXPORTED, except abc_6 which FAILED
    new_states = [
        {
            "sample": "abc_{}".format(i),
            "fileHandle": "abc_{}.mzml".format(i),
            "priority": 100 * (i + 1),
            "time": 1563307359163 + i,
            "value": FAILED if i == 6 else EXPORTED
        }
        for i in range(0, 10)
    ]

    for entry in new_states:
        set_sample_job_state(
            sample=entry['sample'],
            job="12345",
            state=entry['value'],
            reason="it's a test"
        )

    state = calculate_job_state(job="12345")

    assert state == EXPORTED
def test_sync_currently_processing(requireMocking, mocked_10_sample_job):
    """A job whose samples have all reached 'replaced' should report every
    sample in the REPLACED state after aggregation.
    """
    tm = TableManager()

    # (extension, priority, timestamp, value) for each tracking stage,
    # in processing order — reproduces the original fixture exactly
    stages = [
        ("d", 1, 1563307359163, "entered"),
        ("d", 100, 1563307360393, "acquired"),
        ("mzml", 200, 1563307361543, "converted"),
        ("mzml", 300, 1563330092360, "scheduled"),
        ("mzml", 410, 1563330183632, "deconvoluted"),
        ("mzml", 420, 1563330184868, "corrected"),
        ("mzml", 430, 1563330189108, "annotated"),
        ("mzml", 440, 1563330190650, "quantified"),
        ("mzml", 450, 1563330244348, "replaced"),
    ]

    for i in range(0, 10):
        sample = "abc_{}".format(i)

        tracking.create({'body': json.dumps(
            {
                "job": "12345",
                "sample": sample,
                "state": SCHEDULED
            }
        )}, {})
        assert load_job_samples_with_states("12345")[sample] == "scheduled"

        # dummy stasis data which need to be in the system for this test to pass
        tm.get_tracking_table().put_item(Item={
            "experiment": "12345",
            "id": sample,
            "sample": sample,
            "status": [
                {
                    "fileHandle": "{}.{}".format(sample, ext),
                    "priority": priority,
                    "time": ts,
                    "value": value
                }
                for (ext, priority, ts, value) in stages
            ]
        })

    calculate_job_state(job="12345")

    assert all(state == str(REPLACED) for state in load_job_samples_with_states("12345").values())