def test_add_regressions_successfix():
    """Between the two runs: test_1 got fixed (1 successfix) while
    test_2 and test_3 started failing (2 regressions)."""
    old_junit = """ <testsuite errors="0" failures="60" name="" tests="2289" time="3385.127"> <testcase classname="Testsuite1" name="test_1" time="28.810"> <failure type="Exception">Traceback</failure> </testcase> <testcase classname="Testsuite1" name="test_2" time="29.419" /> <testcase classname="Testsuite1" name="test_3" time="29.419" /> </testsuite> """
    new_junit = """ <testsuite errors="0" failures="60" name="" tests="2289" time="3385.127"> <testcase classname="Testsuite1" name="test_1" time="28.810" /> <testcase classname="Testsuite1" name="test_2" time="29.419"> <failure type="Exception">Traceback</failure> </testcase> <testcase classname="Testsuite1" name="test_3" time="29.419"> <failure type="Exception">Traceback</failure> </testcase> </testsuite> """
    previous_run = transformations.junit2dict(BytesIO(old_junit.encode('utf-8')))
    current_run = transformations.junit2dict(BytesIO(new_junit.encode('utf-8')))
    annotated = transformations.add_regressions_and_successfix_to_tests(
        previous_run, current_run)
    assert annotated['successfixes'] == 1
    assert annotated['regressions'] == 2
def get_all_results_from_jobs(user, j_id):
    """Get all results from job.

    Parses every junit file attached to the job and returns the
    aggregated counters of each one.
    """
    job = v1_utils.verify_existence_and_get(j_id, _TABLE)
    # the caller must belong to the job's team
    if not user.is_in_team(job['team_id']):
        raise auth.UNAUTHORIZED

    swift = dci_config.get_store('files')
    job_files = json.loads(files.get_all_files(j_id).response[0])['files']
    junit_files = [jf for jf in job_files
                   if jf['mime'] == 'application/junit']

    results = []
    for junit_file in junit_files:
        file_path = swift.build_file_path(junit_file['team_id'], j_id,
                                          junit_file['id'])
        _, descriptor = swift.get(file_path)
        data = tsfm.junit2dict(descriptor.read())
        results.append({'filename': junit_file['name'],
                        'name': junit_file['name'],
                        'total': data['total'],
                        'failures': data['failures'],
                        'errors': data['errors'],
                        'skips': data['skips'],
                        'time': data['time'],
                        'success': data['success'],
                        'testscases': data['testscases']})

    return flask.jsonify({'results': results,
                          '_meta': {'count': len(results)}})
def get_performance_tests(baseline_tests, tests):
    """Compute per-testcase deltas of each test run against a baseline run.

    Returns a list of {job_id, testscases} entries, the baseline first.
    """
    res = []
    # baseline_tests is processed first because file descriptor
    # is fully read (junit2dict) once
    baseline = transformations.junit2dict(baseline_tests['fd'])
    baseline_cases = baseline['testscases']
    keyed_baseline = _keytify_test_cases(baseline_cases)

    # the baseline is compared against itself (delta of zero)
    with_delta = _add_delta_to_tests(keyed_baseline, baseline_cases)
    res.append({"job_id": baseline_tests['job_id'],
                "testscases": with_delta})

    for current in tests:
        parsed = transformations.junit2dict(current['fd'])
        with_delta = _add_delta_to_tests(keyed_baseline,
                                         parsed['testscases'])
        res.append({"job_id": current['job_id'],
                    "testscases": with_delta})
    return res
def test_junit2dict_with_big_xml():
    """A large junit file is parsed and its counters aggregate correctly."""
    with open('tests/data/run_nxos_integration_tests.xml', 'r') as f:
        parsed = transformations.junit2dict(f)
        expected = {'success': 10311,
                    'errors': 2,
                    'failures': 4,
                    'skips': 1165,
                    'total': 11482,
                    'time': 17010679}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 11482
def test_junit2dict_with_rally_xml():
    """A rally results file parses into the expected counters."""
    with open('tests/data/rally-results.xml', 'r') as f:
        parsed = transformations.junit2dict(f)
        expected = {'success': 16,
                    'errors': 0,
                    'failures': 0,
                    'skips': 0,
                    'total': 16,
                    'time': 1186390}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 16
def test_junit2dict_with_tempest_xml():
    """A tempest results file parses into the expected counters."""
    with open('tests/data/tempest-results.xml', 'r') as f:
        parsed = transformations.junit2dict(f)
        expected = {'success': 117,
                    'errors': 0,
                    'failures': 0,
                    'skips': 13,
                    'total': 130,
                    'time': 1308365}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 130
def test_junit2dict_with_ansible_run_ovs_integration_tests_xml():
    """An ansible OVS integration run parses into the expected counters."""
    with open('tests/data/ansible-run-ovs-integration-tests.xml', 'r') as f:
        parsed = transformations.junit2dict(f)
        expected = {'success': 16,
                    'errors': 0,
                    'failures': 0,
                    'skips': 1,
                    'total': 17,
                    'time': 42321}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 17
def test_junit2dict_with_ansible_run_vyos_integration_tests_xml():
    """An ansible VyOS run opened in binary mode parses correctly."""
    with open('tests/data/ansible-run-vyos-integration-tests.xml', 'rb') as f:
        parsed = transformations.junit2dict(f)
        expected = {'success': 293,
                    'errors': 0,
                    'failures': 0,
                    'skips': 10,
                    'total': 303,
                    'time': 368722}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 303
def test_junit2dict_with_ansible_run_ovs_integration_tests_xml():
    """junit2dict also accepts the file content as a string."""
    with open('tests/data/ansible-run-ovs-integration-tests.xml', 'r') as f:
        raw_xml = f.read()
        parsed = transformations.junit2dict(raw_xml)
        expected = {'success': 7,
                    'errors': 0,
                    'failures': 1,
                    'skips': 1,
                    'total': 9,
                    'time': 3536}
        for key, value in expected.items():
            assert parsed[key] == value
        assert len(parsed['testscases']) == 9
def get_file_testscases(user, file_id):
    """Return the testscases of a junit file, annotated with
    regressions/successfixes computed against the previous job."""
    the_file = get_file_object(file_id)
    # access denied only when the caller has none of the allowed roles
    if (user.is_not_in_team(the_file['team_id']) and
            user.is_not_read_only_user() and
            user.is_not_epm()):
        raise dci_exc.Unauthorized()

    descriptor = get_file_descriptor(the_file)
    jsonunit = tsfm.junit2dict(descriptor)

    job = v1_utils.verify_existence_and_get(the_file['job_id'], models.JOBS)
    previous_jsonunit = _get_previous_jsonunit(job, the_file['name'])
    jsonunit = _compute_regressions_successfix(jsonunit, previous_jsonunit)

    payload = json.dumps({"testscases": jsonunit["testscases"]})
    return flask.Response(payload, 200, content_type='application/json')
def _get_previous_jsonunit(job, filename):
    """Parse the test result file of the same name from the previous job
    in the topic, or return None when there is no such job or file."""
    previous_job = get_previous_job_in_topic(job)
    if previous_job is None:
        return None

    query = sql.select([models.TESTS_RESULTS]).where(
        sql.and_(models.TESTS_RESULTS.c.job_id == previous_job['id'],
                 models.TESTS_RESULTS.c.name == filename))
    row = flask.g.db_conn.execute(query).fetchone()
    if row is None:
        return None

    test_file = get_file_object(row.file_id)
    descriptor = get_file_descriptor(test_file)
    return tsfm.junit2dict(descriptor)
def _process_junit_file(values, junit_file, job):
    """Parse a junit file, enrich it with regression/successfix and
    known-testcase information, then persist the summary row."""
    jsonunit = tsfm.junit2dict(junit_file)
    previous_jsonunit = _get_previous_jsonunit(job, values['name'])
    jsonunit = _compute_regressions_successfix(jsonunit, previous_jsonunit)
    jsonunit = _compute_known_tests_cases(jsonunit, job)

    summary_row = {
        'id': utils.gen_uuid(),
        'created_at': values['created_at'],
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'file_id': values['id'],
        'job_id': job['id'],
        'name': values['name'],
        'success': jsonunit['success'],
        'failures': jsonunit['failures'],
        'errors': jsonunit['errors'],
        'regressions': jsonunit['regressions'],
        'successfixes': jsonunit['successfixes'],
        'skips': jsonunit['skips'],
        'total': jsonunit['total'],
        'time': jsonunit['time']
    }
    flask.g.db_conn.execute(
        models.TESTS_RESULTS.insert().values(summary_row))
def test_junit2dict():
    """The reference junit input converts to the reference dict."""
    assert transformations.junit2dict(JUNIT) == JSONUNIT
def test_junit2dict_invalid():
    """Malformed XML is reported through the 'error' key."""
    # drop the first closing testcase tag so the XML no longer parses
    broken_junit = JUNIT.replace('</testcase>', '', 1)
    parsed = transformations.junit2dict(
        BytesIO(broken_junit.encode('utf-8')))
    assert 'ParseError' in parsed['error']
def test_junit2dict():
    """The reference junit input (as a byte stream) converts to the
    reference dict."""
    stream = BytesIO(JUNIT.encode('utf-8'))
    assert transformations.junit2dict(stream) == JSONUNIT
def test_junit2dict_invalid():
    """Malformed XML is reported through the 'error' key."""
    # drop the first closing testcase tag so the XML no longer parses
    broken_junit = JUNIT.replace('</testcase>', '', 1)
    parsed = transformations.junit2dict(broken_junit)
    assert 'XMLSyntaxError' in parsed['error']
def test_junit2dict_empty():
    """An empty input yields an empty dict."""
    assert transformations.junit2dict('') == {}
def create_files(user):
    """Create a file attached to a job or jobstate.

    File metadata comes from the DCI-* HTTP headers, the content from the
    request body.  The content is stored in swift and a row inserted in
    the files table; junit files additionally get a parsed summary row in
    TESTS_RESULTS.  Returns a 201 response with the file description.

    Raises DCIException when mandatory headers are missing and
    DCINotFound when the referenced jobstate/job does not exist.
    """
    file_info = get_file_info_from_headers(dict(flask.request.headers))
    swift = dci_config.get_store('files')
    # start from a fixed set of keys so missing headers become None
    values = dict.fromkeys(
        ['md5', 'mime', 'jobstate_id', 'job_id', 'name', 'test_id'])
    values.update(file_info)

    # the file must be attached to either a jobstate or a job
    if values.get('jobstate_id') is None and values.get('job_id') is None:
        raise dci_exc.DCIException('HTTP headers DCI-JOBSTATE-ID or '
                                   'DCI-JOB-ID must be specified')
    if values.get('name') is None:
        raise dci_exc.DCIException('HTTP header DCI-NAME must be specified')

    # when a jobstate is given, resolve the job it belongs to
    if values['jobstate_id']:
        query = v1_utils.QueryBuilder(models.JOBSTATES)
        query.add_extra_condition(
            models.JOBSTATES.c.id == values['jobstate_id'])
        row = query.execute(fetchone=True)
        if row is None:
            raise dci_exc.DCINotFound('Jobstate', values['jobstate_id'])
        values['job_id'] = row['jobstates_job_id']

    # verify the job exists; non-admin users are restricted to their team
    query = v1_utils.QueryBuilder(models.JOBS)
    if not auth.is_admin(user):
        query.add_extra_condition(models.JOBS.c.team_id == user['team_id'])
    query.add_extra_condition(models.JOBS.c.id == values['job_id'])
    row = query.execute(fetchone=True)
    if row is None:
        raise dci_exc.DCINotFound('Job', values['job_id'])

    file_id = utils.gen_uuid()
    # ensure the directory which will contains the file actually exist
    file_path = swift.build_file_path(user['team_id'],
                                      values['job_id'],
                                      file_id)

    # upload the request payload then read back its metadata (size)
    content = files.get_stream_or_content_from_request(flask.request)
    swift.upload(file_path, content)
    s_file = swift.head(file_path)

    etag = utils.gen_etag()
    values.update({
        'id': file_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'team_id': user['team_id'],
        # NOTE(review): md5 is forced to None even if a DCI-MD5 header
        # was provided — presumably intentional; confirm
        'md5': None,
        'size': s_file['content-length'],
        'state': 'active',
        'etag': etag,
    })

    query = _TABLE.insert().values(**values)

    # file row, optional junit summary row and the creation event are
    # committed in a single transaction
    with flask.g.db_conn.begin():
        flask.g.db_conn.execute(query)
        result = json.dumps({'file': values})

        # junit files are parsed right away and a summary row stored
        if values['mime'] == 'application/junit':
            _, file_descriptor = swift.get(file_path)
            junit = tsfm.junit2dict(file_descriptor.read())
            query = models.TESTS_RESULTS.insert().values({
                'id': utils.gen_uuid(),
                'created_at': values['created_at'],
                'updated_at': datetime.datetime.utcnow().isoformat(),
                'file_id': file_id,
                'job_id': values['job_id'],
                'name': values['name'],
                'success': junit['success'],
                'failures': junit['failures'],
                'errors': junit['errors'],
                'skips': junit['skips'],
                'total': junit['total'],
                'time': junit['time']
            })
            flask.g.db_conn.execute(query)

        files_events.create_event(file_id, models.FILES_CREATE)

    return flask.Response(result, 201, content_type='application/json')