def upload_testresult():
    """Validate the request token, parse an uploaded JSON test result and
    persist the tests, test runs, measurements and aggregate job stats.

    Expects a multipart upload under the 'data' key containing a JSON
    document that passes validate_result().  Returns ("", 200) on
    success, or an error body with an appropriate status code otherwise.
    """
    token = validate_token()

    if not token:
        return "Forbidden", 403

    job_id = token['job']['id']
    project_id = token['project']['id']

    if 'data' not in request.files:
        return jsonify({}), 400

    f = request.files['data']

    # BUG FIX: ("json") is just the string "json", not a tuple; a
    # one-element tuple needs a trailing comma.
    if not allowed_file(f.filename, ("json", )):
        return jsonify({}), 400

    # BUG FIX: the fixed path '/tmp/testresult.json' raced between
    # concurrent uploads; use a unique name per request instead.
    path = '/tmp/%s.json' % uuid.uuid4()
    f.save(path)

    try:
        # check size (16 MiB limit)
        if os.path.getsize(path) > 16 * 1024 * 1024:
            return "File too big", 400

        # Parse it.  BUG FIX: catch only JSON errors instead of a bare
        # except (which also swallowed KeyboardInterrupt etc.), and
        # answer 400 rather than 404 — a malformed body is a client
        # error, not a missing resource.
        try:
            with open(path, 'r') as testresult:
                data = json.load(testresult)
        except ValueError:
            return 'Failed to parse json', 400
    finally:
        # BUG FIX: the temp file was never removed.
        try:
            os.remove(path)
        except OSError:
            pass

    # Validate it
    try:
        validate_result(data)
    except ValidationError as e:
        return e.message, 400

    # Refuse a second upload for the same job.
    cursor = conn.cursor()
    cursor.execute("SELECT COUNT(*) as cnt FROM test_run WHERE job_id=%s",
                   (job_id, ))
    testruns = cursor.fetchone()

    if testruns[0] > 0:
        return "", 404

    # Resolve the job's project and build number.
    cursor = conn.cursor()
    cursor.execute(
        """SELECT j.project_id, b.build_number
           FROM job j
           INNER JOIN build b
               ON j.id = %s
               AND b.id = j.build_id
        """, (job_id, ))
    rows = cursor.fetchone()
    project_id = rows[0]
    build_number = rows[1]

    # Index already-known tests by "suite|name" so only new ones are
    # inserted.  BUG FIX: the columns were selected as (name, suite),
    # producing "name|suite" keys, while the lookup below builds
    # "suite|name" — so existing tests never matched and were
    # re-inserted on every upload.
    cursor = conn.cursor()
    cursor.execute(
        """SELECT suite, name, id FROM test WHERE project_id = %s""",
        (project_id, ))
    existing_tests = cursor.fetchall()

    test_index = {}
    for t in existing_tests:
        test_index[t[0] + '|' + t[1]] = t[2]

    # Lookup all IDs and prepare insert for missing tests
    missing_tests = []
    test_runs = []
    measurements = []

    stats = {
        "tests_added": 0,
        "tests_duration": 0,
        "tests_skipped": 0,
        "tests_failed": 0,
        "tests_error": 0,
        "tests_passed": 0,
    }

    tests = data['tests']
    for t in tests:
        # Reuse the id of a known test, otherwise allocate a new one.
        concat_name = t['suite'] + '|' + t['name']
        if concat_name in test_index:
            # existing test
            test_id = test_index[concat_name]
        else:
            # new test
            test_id = str(uuid.uuid4())
            missing_tests.append(
                (t['name'], t['suite'], project_id, test_id, build_number))
            # BUG FIX: tests_added was initialized but never incremented.
            stats['tests_added'] += 1

        # Track stats; normalize the legacy 'fail' status to 'failure'.
        if t['status'] == 'fail' or t['status'] == 'failure':
            t['status'] = 'failure'
            stats['tests_failed'] += 1
        elif t['status'] == 'ok':
            stats['tests_passed'] += 1
        elif t['status'] == 'skipped':
            stats['tests_skipped'] += 1
        elif t['status'] == 'error':
            stats['tests_error'] += 1

        stats['tests_duration'] += t['duration']

        # Create the corresponding test run
        test_run_id = str(uuid.uuid4())
        test_runs.append(
            (test_run_id, t['status'], job_id, test_id, t['duration'],
             project_id, t.get('message', None), t.get('stack', None)))

        # create measurements
        for m in t.get('measurements', []):
            measurements.append(
                (test_run_id, m['name'], m['unit'], m['value'], project_id))

    if missing_tests:
        insert(conn, ("name", "suite", "project_id", "id", "build_number"),
               missing_tests, 'test')

    if measurements:
        insert(conn, ("test_run_id", "name", "unit", "value", "project_id"),
               measurements, 'measurement')

    insert(conn, ("id", "state", "job_id", "test_id", "duration",
                  "project_id", "message", "stack"), test_runs, 'test_run')

    insert(
        conn,
        ("tests_added", "tests_duration", "tests_skipped", "tests_failed",
         "tests_error", "tests_passed", "job_id", "project_id"),
        ((stats['tests_added'], stats['tests_duration'],
          stats['tests_skipped'], stats['tests_failed'],
          stats['tests_error'], stats['tests_passed'], job_id,
          project_id), ), 'job_stat')

    conn.commit()
    return "", 200
def post(self):
    """Store an uploaded JSON test result for the authenticated job.

    Reads the multipart 'data' file, validates it against the result
    schema, then inserts any previously unknown tests plus the test runs
    and measurements for this job.  Returns an empty JSON object on
    success and aborts with 400 on any client error.
    """
    job_id = g.token['job']['id']
    project_id = g.token['project']['id']

    if 'data' not in request.files:
        abort(400, 'data not set')

    f = request.files['data']

    # BUG FIX: ("json") is just the string "json", not a tuple; a
    # one-element tuple needs a trailing comma.
    if not allowed_file(f.filename, ("json", )):
        abort(400, 'file ending not allowed')

    path = '/tmp/%s.json' % uuid.uuid4()

    # Remove the temp file once the response has been sent.
    @after_this_request
    def _remove_file(response):
        delete_file(path)
        return response

    f.save(path)

    # check size (16 MiB limit)
    if os.path.getsize(path) > 16 * 1024 * 1024:
        abort(400, "File too big")

    # Parse it.  BUG FIX: catch only JSON parse errors; the bare except
    # also swallowed unrelated failures such as KeyboardInterrupt.
    try:
        with open(path, 'r') as testresult:
            data = json.load(testresult)
    except ValueError:
        abort(400, 'Failed to parse json')

    # Validate it
    try:
        validate_result(data)
    except ValidationError as e:
        abort(400, e.message)

    # Resolve the job's project and build number.
    rows = g.db.execute_one(
        """
        SELECT j.project_id, b.build_number
        FROM job j
        INNER JOIN build b
            ON j.id = %s
            AND b.id = j.build_id
    """, [job_id])
    project_id = rows[0]
    build_number = rows[1]

    # Index already-known tests by "suite|name" so only new ones are
    # inserted.
    existing_tests = g.db.execute_many(
        """SELECT suite, name, id FROM test WHERE project_id = %s""",
        [project_id])

    test_index = {}
    for t in existing_tests:
        test_index[t[0] + '|' + t[1]] = t[2]

    # Lookup all IDs and prepare insert for missing tests
    missing_tests = []
    test_runs = []
    measurements = []

    tests = data['tests']
    for t in tests:
        # Truncate names to fit the column limit.
        if len(t['suite']) > 250:
            t['suite'] = t['suite'][0:250]

        if len(t['name']) > 250:
            t['name'] = t['name'][0:250]

        # Reuse the id of a known test, otherwise allocate a new one.
        concat_name = t['suite'] + '|' + t['name']
        if concat_name in test_index:
            # existing test
            test_id = test_index[concat_name]
        else:
            # new test
            test_id = str(uuid.uuid4())
            missing_tests.append(
                (t['name'], t['suite'], project_id, test_id, build_number))

        # Normalize the legacy 'fail' status to 'failure'.
        if t['status'] == 'fail' or t['status'] == 'failure':
            t['status'] = 'failure'

        # Create the corresponding test run
        test_run_id = str(uuid.uuid4())
        test_runs.append(
            (test_run_id, t['status'], job_id, test_id, t['duration'],
             project_id, t.get('message', None), t.get('stack', None)))

        # create measurements
        for m in t.get('measurements', []):
            measurements.append(
                (test_run_id, m['name'], m['unit'], m['value'], project_id))

    if missing_tests:
        insert(g.db.conn, ("name", "suite", "project_id", "id",
                           "build_number"), missing_tests, 'test')

    if measurements:
        insert(g.db.conn, ("test_run_id", "name", "unit", "value",
                           "project_id"), measurements, 'measurement')

    insert(g.db.conn, ("id", "state", "job_id", "test_id", "duration",
                       "project_id", "message", "stack"), test_runs,
           'test_run')

    g.db.commit()
    return jsonify({})
def raises_expect(self, data, expected):
    """Assert that validate_result(data) raises a ValidationError whose
    message equals *expected*; fail if no ValidationError is raised.
    """
    try:
        validate_result(data)
        # BUG FIX: a bare `assert False` is stripped under `python -O`
        # and gives no diagnostic; self.fail() always fires and reports
        # why the test failed.  (self.fail raises AssertionError, which
        # the ValidationError clause below does not catch.)
        self.fail('ValidationError not raised')
    except ValidationError as e:
        self.assertEqual(e.message, expected)