def test_purge_failure(app, admin, user, jobstate_user_id, job_user_id, team_user_id):
    """Purging files must be a no-op when the backend delete fails."""
    # Create two files and archive (soft-delete) them.
    archived_ids = []
    for fname, fcontent in (('kikoolol', 'content'), ('kikoolol2', 'content2')):
        fid = t_utils.post_file(user, jobstate_user_id, FileDesc(fname, fcontent))
        user.delete('/api/v1/files/%s' % fid)
        archived_ids.append(fid)
    assert len(admin.get('/api/v1/files/purge').data['files']) == 2
    # Force the store deletion to fail: the purge must report an error and
    # leave both files untouched, in the store and in the database.
    with mock.patch('dci.stores.filesystem.FileSystem.delete') as mock_delete:
        mock_delete.side_effect = dci_exc.StoreExceptions('error')
        assert admin.post('/api/v1/files/purge').status_code == 400
        store = dci_config.get_store('files')
        for fid in archived_ids:
            # get() succeeding proves the file is still in the backend.
            store.get(files_utils.build_file_path(team_user_id, job_user_id, fid))
        assert len(admin.get('/api/v1/files/purge').data['files']) == 2
def test_essync_add_files(user, jobstate_user_id):
    """dci-essync must index every newly posted file into elasticsearch."""
    # Post five files, all attached to the same jobstate.
    for _ in range(5):
        utils.post_file(user, jobstate_user_id,
                        utils.FileDesc('kikoolol', 'content'))
    env = {
        'DCI_CS_URL': 'http://127.0.0.1:5000',
        'DCI_LOGIN': '******',
        'DCI_PASSWORD': '******'
    }
    status = utils.run_bin('dci-essync', env=env)
    # communicate() waits for the process to finish so returncode is set.
    # Keep the captured output in the assert message for diagnostics instead
    # of unconditionally pprint-ing it (debug leftover removed).
    out, err = status.communicate()
    assert status.returncode == 0, (out, err)
    # One indexed document per posted file.
    assert es_engine.get_last_sequence(doc_type='logs') == 5
def test_known_issues_in_tests(admin, user, job_user_id, topic_user_id):
    """Testcases must expose every issue attached to their matching test."""
    def create_issue(url):
        # Issues are scoped to the user's topic.
        res = user.post('/api/v1/issues',
                        data={'url': url, 'topic_id': topic_user_id})
        return res.data['issue']['id']

    issue_id1 = create_issue('http://bugzilla/42')
    issue_id2 = create_issue('http://bugzilla/43')
    # One test entry, associated with both issues.
    test_id = user.post('/api/v1/tests',
                        data={'name': 'Testsuite_1:test_3'}).data['test']['id']
    for issue_id in (issue_id1, issue_id2):
        user.post('/api/v1/issues/%s/tests' % issue_id,
                  data={'test_id': test_id})
    jobstate = admin.post('/api/v1/jobstates',
                          data={'job_id': job_user_id,
                                'status': 'failure'}).data['jobstate']
    file_id = t_utils.post_file(admin, jobstate['id'],
                                FileDesc('Tempest', tests_data.jobtest_two),
                                mime='application/junit')
    testscases = admin.get(
        '/api/v1/files/%s/testscases' % file_id).data["testscases"]
    for tc in testscases:
        if tc['name'] == 'Testsuite_1:test_3':
            assert len(tc['issues']) == 2
            assert {i['id'] for i in tc['issues']} == {issue_id1, issue_id2}
def test_purge_failure(user, admin, job_user_id, jobstate_user_id, team_user_id):
    """A failing store delete must leave jobs and files listed for purge."""
    job_etag = user.get('/api/v1/jobs/%s' % job_user_id).data['job']['etag']
    # Attach one file to the job, then archive the job itself.
    file_id = t_utils.post_file(user, jobstate_user_id,
                                FileDesc('kikoolol', 'content'))
    delete_res = admin.delete('/api/v1/jobs/%s' % job_user_id,
                              headers={'If-match': job_etag})
    assert delete_res.status_code == 204
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
    assert len(admin.get('/api/v1/files/purge').data['files']) == 1
    # Make the backend deletion fail: the purge must return an error.
    with mock.patch('dci.stores.filesystem.FileSystem.delete') as mock_delete:
        mock_delete.side_effect = dci_exc.StoreExceptions('error')
        assert admin.post('/api/v1/jobs/purge').status_code == 400
        file_path = files_utils.build_file_path(team_user_id, job_user_id,
                                                file_id)
        # Because the delete failed, the backend still holds the file and
        # the database rows are still flagged for purge.
        dci_config.get_store('files').get(file_path)
        assert len(admin.get('/api/v1/files/purge').data['files']) == 1
        assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
def test_purge(user, admin, job_user_id, jobstate_user_id, team_user_id):
    """Purging an archived job removes its files from DB and backend."""
    job_etag = user.get('/api/v1/jobs/%s' % job_user_id).data['job']['etag']
    # One file attached to the job, then the job is archived.
    file_id = t_utils.post_file(user, jobstate_user_id,
                                FileDesc('kikoolol', 'content'))
    delete_res = admin.delete('/api/v1/jobs/%s' % job_user_id,
                              headers={'If-match': job_etag})
    assert delete_res.status_code == 204
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
    assert len(admin.get('/api/v1/files/purge').data['files']) == 1
    admin.post('/api/v1/jobs/purge')
    file_path = files_utils.build_file_path(team_user_id, job_user_id, file_id)
    # The purge removed the file from the backend: get() must now raise.
    with pytest.raises(dci_exc.StoreExceptions):
        dci_config.get_store('files').get(file_path)
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 0
    assert len(admin.get('/api/v1/files/purge').data['files']) == 0
def test_files(m_datetime, admin, user_sso_rh_employee, app, engine,
               jobstate_user_id, job_user_id):
    """An SSO user can list a job's files and fetch a file's content."""
    sso_client = user_sso_rh_employee
    # Freeze the mocked clock at a fixed timestamp.
    frozen_now = mock.MagicMock()
    frozen_now.utctimetuple.return_value = \
        datetime.datetime.fromtimestamp(1518653629).timetuple()
    m_datetime.utcnow.return_value = frozen_now
    with app.app_context():
        flask.g.db_conn = engine.connect()
        # Listing the job's files must succeed.
        listing = sso_client.get('/api/v1/jobs/%s/files' % job_user_id)
        assert listing.status_code == 200
        # Retrieve a file's content through a mocked swift store.
        with mock.patch(SWIFT, spec=Swift) as mock_swift:
            content = "azertyuiop1234567890"
            swift_stub = mock.MagicMock()
            head_result = {
                'etag': utils.gen_etag(),
                'content-type': "stream",
                'content-length': 7
            }
            swift_stub.head.return_value = head_result
            swift_stub.get.return_value = [
                head_result, six.StringIO(content)]
            mock_swift.return_value = swift_stub
            new_file = t_utils.post_file(admin, jobstate_user_id,
                                         FileDesc('foo', content))
            fetched = sso_client.get('/api/v1/files/%s' % new_file)
            assert fetched.status_code == 200
def test_create_files(user, jobstate_user_id):
    """A created file exposes its name and its byte size."""
    new_id = t_utils.post_file(user, jobstate_user_id,
                               FileDesc('kikoolol', 'content'))
    created = user.get('/api/v1/files/%s' % new_id).data['file']
    assert created['name'] == 'kikoolol'
    # 'content' is 7 bytes long.
    assert created['size'] == 7
def test_get_file_by_id(user, jobstate_user_id):
    """A file can be fetched back by its uuid."""
    new_id = t_utils.post_file(user, jobstate_user_id,
                               FileDesc('kikoolol', ''))
    res = user.get('/api/v1/files/%s' % new_id)
    assert res.status_code == 200
    assert res.data['file']['name'] == 'kikoolol'
def test_get_file_content_as_user(user, jobstate_user_id):
    """A team user can download the raw content of a file."""
    payload = "azertyuiop1234567890"
    new_id = t_utils.post_file(user, jobstate_user_id,
                               FileDesc('foo', payload))
    res = user.get('/api/v1/files/%s/content' % new_id)
    assert res.status_code == 200
    assert res.data == payload
def test_files_events_create(admin, user, jobstate_user_id, team_user_id):
    """Creating a file records a FILES_CREATE event."""
    new_id = utils.post_file(user, jobstate_user_id,
                             utils.FileDesc('kikoolol', 'content'))
    res = admin.get('/api/v1/files_events/0')
    assert res.status_code == 200
    first_event = res.data['files'][0]['event']
    assert first_event['file_id'] == new_id
    assert first_event['action'] == models.FILES_CREATE
def test_files_events_delete(admin, user, jobstate_user_id, team_user_id):
    """Deleting a file records a FILES_DELETE event."""
    new_id = utils.post_file(user, jobstate_user_id,
                             utils.FileDesc('kikoolol', 'content'))
    admin.delete('/api/v1/files/%s' % new_id)
    res = admin.get('/api/v1/files_events/1?sort=id')
    assert res.status_code == 200
    # Index 1: the delete event follows the create event.
    delete_event = res.data['files'][1]['event']
    assert delete_event['file_id'] == new_id
    assert delete_event['action'] == models.FILES_DELETE
def test_delete_file_by_id(user, jobstate_user_id):
    """A deleted file is no longer retrievable."""
    new_id = t_utils.post_file(user, jobstate_user_id, FileDesc('name', ''))
    file_url = '/api/v1/files/%s' % new_id
    assert user.get(file_url).status_code == 200
    assert user.delete(file_url).status_code == 204
    assert user.get(file_url).status_code == 404
def test_upload_tests_with_regressions_successfix(admin, remoteci_context,
                                                  remoteci, topic):
    """Regressions and successfixes are computed between two jobs' junits."""
    headers = {
        'User-Agent': 'python-dciclient',
        'Client-Version': 'python-dciclient_0.1.0'
    }
    schedule_data = {'topic_id': topic['id'], 'remoteci_id': remoteci['id']}

    def schedule_job():
        # Schedule a job on the remoteci/topic pair.
        return remoteci_context.post('/api/v1/jobs/schedule',
                                     headers=headers,
                                     data=schedule_data).data['job']

    def add_jobstate(job, status):
        return admin.post('/api/v1/jobstates',
                          data={'job_id': job['id'],
                                'status': status}).data['jobstate']

    def upload_junit(jobstate, name, content):
        return t_utils.post_file(admin, jobstate['id'],
                                 FileDesc(name, content),
                                 mime='application/junit')

    # 1. schedule two jobs and 2. create their associated jobstates.
    job_1 = schedule_job()
    job_2 = schedule_job()
    jobstate_1 = add_jobstate(job_1, 'success')
    jobstate_2 = add_jobstate(job_2, 'failure')
    f_1 = upload_junit(jobstate_1, 'Tempest', tests_data.jobtest_one)
    assert f_1 is not None
    upload_junit(jobstate_1, 'Rally', tests_data.jobtest_one)
    f_2 = upload_junit(jobstate_2, 'Tempest', tests_data.jobtest_two)
    assert f_2 is not None
    upload_junit(jobstate_2, 'Rally', tests_data.jobtest_one)
    # 3. job_2's Tempest results carry one regression ('test_3') and one
    # successfix; Rally is identical between the two jobs.
    job_2_results = admin.get(
        '/api/v1/jobs/%s?embed=results' % job_2['id']).data['job']['results']
    for job_res in job_2_results:
        if job_res['name'] == 'Tempest':
            assert job_res['regressions'] == 1
            assert job_res['successfixes'] == 1
        elif job_res['name'] == 'Rally':
            assert job_res['regressions'] == 0
            assert job_res['successfixes'] == 0
    tcs = admin.get('/api/v1/files/%s/testscases' % f_2).data['testscases']
    assert tcs[0]['successfix']
    assert not tcs[0]['regression']
    assert not tcs[1]['successfix']
    assert not tcs[1]['regression']
    assert not tcs[2]['successfix']
    assert tcs[2]['regression']
def test_purge(app, admin, user, jobstate_user_id, team_user_id, job_user_id):
    """Purging archived files removes them from database and backend."""
    # Create two files and archive (soft-delete) them.
    archived_ids = []
    for fname, fcontent in (('kikoolol', 'content'), ('kikoolol2', 'content2')):
        fid = t_utils.post_file(user, jobstate_user_id, FileDesc(fname, fcontent))
        user.delete('/api/v1/files/%s' % fid)
        archived_ids.append(fid)
    assert len(admin.get('/api/v1/files/purge').data['files']) == 2
    admin.post('/api/v1/files/purge')
    store = dci_config.get_store('files')
    # The purge removed both files from the backend: get() must raise.
    for fid in archived_ids:
        with pytest.raises(dci_exc.StoreExceptions):
            store.get(files_utils.build_file_path(team_user_id, job_user_id, fid))
    assert len(admin.get('/api/v1/files/purge').data['files']) == 0
def test_get_file_with_embed_not_valid(user, jobstate_user_id):
    """Requesting an unknown embed on a file returns a 400."""
    new_id = t_utils.post_file(user, jobstate_user_id, FileDesc('name', ''))
    res = user.get('/api/v1/files/%s?embed=mdr' % new_id)
    assert res.status_code == 400
def test_compare_performance(user, remoteci_context, team_user_id, topic, topic_user_id):  # noqa
    """The performance endpoint reports per-testcase deltas vs a baseline."""
    def schedule_job():
        res = remoteci_context.post('/api/v1/jobs/schedule',
                                    data={'topic_id': topic['id']})
        return res.data['job']

    def add_success_jobstate(job):
        return remoteci_context.post(
            '/api/v1/jobstates',
            data={'job_id': job['id'], 'status': 'success'}).data['jobstate']

    def upload_junit(jobstate, name, content):
        fid = t_utils.post_file(user, jobstate['id'], FileDesc(name, content),
                                mime='application/junit')
        assert fid is not None
        return fid

    # Baseline job with two junit files.
    job_baseline = schedule_job()
    js_baseline = add_success_jobstate(job_baseline)
    upload_junit(js_baseline, 'PBO_Results', tests_data.jobtest_one)
    upload_junit(js_baseline, 'Tempest', tests_data.jobtest_one)
    # Second job, same file names but different test durations.
    job2 = schedule_job()
    js_job2 = add_success_jobstate(job2)
    upload_junit(js_job2, 'PBO_Results', tests_data.jobtest_two)
    upload_junit(js_job2, 'Tempest', tests_data.jobtest_two)
    res = user.post('/api/v1/performance',
                    headers={'Content-Type': 'application/json'},
                    data={'base_job_id': job_baseline['id'],
                          'jobs': [job2['id']]})
    expected = {'Testsuite_1/test_1': 20.,
                'Testsuite_1/test_2': -25.,
                'Testsuite_1/test_3[id-2fc6822e-b5a8-42ed-967b-11d86e881ce3,smoke]': 25.}  # noqa
    for per_file in res.data['performance']:
        filename = list(per_file.keys())[0]
        for job_result in per_file[filename]:
            if job_result['job_id'] == job_baseline['id']:
                # The baseline compared to itself has no delta.
                for tc in job_result['testscases']:
                    assert tc['delta'] == 0.
            else:
                for tc in job_result['testscases']:
                    key = '%s/%s' % (tc['classname'], tc['name'])
                    assert expected[key] == tc['delta']