def get(self, project_id, source_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    source = Source.query.filter(
        Source.id == source_id,
        Source.repository_id == repo.id,
    ).first()
    if source is None:
        return '', 404

    context = self.serialize(source)

    if source.patch:
        context['diff'] = source.patch.diff
    else:
        vcs = repo.get_vcs()
        if vcs:
            try:
                context['diff'] = vcs.export(source.revision_sha)
            except Exception:
                context['diff'] = None
        else:
            context['diff'] = None

    return self.respond(context)
def post(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    args = self.post_parser.parse_args()

    if args.name:
        project.name = args.name

    if args.slug:
        match = Project.query.filter(
            Project.slug == args.slug,
            Project.id != project.id,
        ).first()
        if match:
            return '{"error": "Project with slug %r already exists"}' % (args.slug,), 400

        project.slug = args.slug

    if args.repository:
        repository = Repository.get(args.repository)
        if repository is None:
            return '{"error": "Repository with url %r does not exist"}' % (args.repository,), 400

        project.repository = repository

    db.session.add(project)

    data = self.serialize(project)
    data['repository'] = self.serialize(project.repository)

    return self.respond(data, serialize=False)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    first_test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.asc()).limit(1).first()

    first_build = Build.query.options(
        joinedload('author'),
        joinedload('source').joinedload('revision'),
    ).filter(
        Build.id == first_test.job.build_id,
    ).first()

    context = self.serialize(test, {
        TestCase: GeneralizedTestCase(),
    })
    context.update({
        'firstBuild': first_build,
    })

    return self.respond(context, serialize=False)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    if args.date:
        try:
            query_date = datetime.strptime(args.date, '%Y-%m-%d').date()
        except ValueError:
            # strptime raises ValueError on a malformed date; catching it
            # narrowly avoids swallowing unrelated exceptions
            return 'Can\'t parse date "%s"' % (args.date,), 500
    else:
        # This `7` is hard-coded to match the code in config.py which kicks
        # off the cron job 7 hours past midnight GMT (which corresponds to
        # midnight PST)
        delta = timedelta(days=2 if datetime.utcnow().hour < 7 else 1)
        query_date = datetime.utcnow().date() - delta

    data = {
        'date': str(query_date),
        'chartData': self.get_chart_data(project_id, query_date),
        'flakyTests': self.get_flaky_tests(project_id, query_date),
    }

    return self.respond(data, serialize=False)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).join(Revision.author).first()
    if not revision:
        return '', 404

    build_list = list(Build.query.options(
        joinedload('author'),
        contains_eager('source'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Build.project_id == project.id,
        Source.revision_sha == revision.sha,
        Source.patch == None,  # NOQA
    ).order_by(Build.date_created.desc()))[:100]

    context = self.serialize(revision)
    context.update({
        'repository': repo,
        'builds': build_list,
    })

    return self.respond(context)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    filters = []

    if args.authors:
        filters.append(Build.author_id.in_([a.id for a in args.authors]))
    elif args.authors is not None:
        return []

    if args.source:
        filters.append(Build.target.startswith(args.source))

    # is this from the search bar
    if args.query:
        clauses = []
        # search by revision title
        clauses.append(Build.label.contains(args.query))
        # search by prefix
        clauses.append(Build.target.startswith(args.query))
        # allows users to paste a full commit hash and still
        # find the relevant build(s). Should be fine for mercurial/git,
        # and svn will never have long enough strings
        if len(args.query) > 12:
            clauses.append(Build.target.startswith(args.query[0:12]))
        # if they searched for something that looks like a phabricator
        # identifier, try to find it
        if might_be_diffusion_iden(args.query):
            possible_hash = get_hash_from_diffusion_iden(args.query)
            if possible_hash:
                # the query should always be at least as long or longer than
                # our commit identifiers
                clauses.append(
                    Build.target.startswith(possible_hash[0:12]))
        filters.append(or_(*clauses))

    if args.result:
        filters.append(Build.result == Result[args.result])

    if args.patches_only:
        filters.append(Source.patch_id != None)  # NOQA
    elif not args.include_patches:
        filters.append(Source.patch_id == None)  # NOQA

    queryset = Build.query.options(
        joinedload('project', innerjoin=True),
        joinedload('author'),
        contains_eager('source').joinedload('revision'),
    ).join(
        Source, Source.id == Build.source_id,
    ).filter(
        Build.project_id == project.id,
        *filters
    ).order_by(Build.date_created.desc())

    return self.paginate(queryset)
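# A minimal sketch of the two Phabricator helpers used above, assuming
# Diffusion identifiers of the form "r" + UPPERCASE repository callsign +
# commit hash (e.g. "rREPO1a2b3c..."). The real helpers live elsewhere in
# the codebase; the exact regex here is an assumption for illustration.
import re

_DIFFUSION_RE = re.compile(r'^r[A-Z]+(?P<hash>[a-z0-9]+)$')


def might_be_diffusion_iden(query):
    # cheap shape check: does the string look like a Diffusion identifier?
    return _DIFFUSION_RE.match(query) is not None


def get_hash_from_diffusion_iden(query):
    # extract the trailing commit hash, or None if the string doesn't match
    match = _DIFFUSION_RE.match(query)
    return match.group('hash') if match else None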
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).join(Revision.author).first()
    if not revision:
        return '', 404

    build_list = list(Build.query.options(
        joinedload('author'),
        contains_eager('source'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Build.project_id == project.id,
        Source.revision_sha == revision.sha,
        Source.patch == None,  # NOQA
    ).order_by(Build.date_created.desc()))[:100]

    context = self.serialize(revision)
    context.update({
        'repository': repo,
        'builds': build_list,
    })

    return self.respond(context)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).join(Revision.author).first()
    if not revision:
        return '', 404

    build_query = Build.query.options(
        joinedload('author'),
        contains_eager('source').joinedload('revision'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Build.project_id == project.id,
        Source.revision_sha == revision.sha,
        Source.patch == None,  # NOQA
    ).order_by(Build.date_created.desc())

    return self.paginate(build_query)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    first_test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.asc()).limit(1).first()

    first_build = Build.query.options(
        joinedload('author'),
        joinedload('source').joinedload('revision'),
    ).filter(
        Build.id == first_test.job.build_id,
    ).first()

    context = self.serialize(test, {
        TestCase: GeneralizedTestCase(),
    })
    context.update({
        'firstBuild': first_build,
    })

    return self.respond(context, serialize=False)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    try:
        revision = Revision.get_by_sha_prefix_query(
            repo.id,
            commit_id,
        ).options(
            joinedload('author')
        ).scalar()
    except MultipleResultsFound:
        return '', 404
    else:
        if not revision:
            return '', 404

    context = self.serialize(revision)
    context.update({
        'repository': repo,
    })

    return self.respond(context)
def get(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    args = self.get_parser.parse_args()

    queryset = Plan.query.filter(
        Plan.project_id == project.id,
    )

    if args.query:
        queryset = queryset.filter(
            func.lower(Plan.label).contains(args.query.lower()),
        )

    if args.status:
        queryset = queryset.filter(
            Plan.status == PlanStatus[args.status],
        )

    if args.sort == 'name':
        queryset = queryset.order_by(Plan.label.asc())
    elif args.sort == 'date':
        queryset = queryset.order_by(Plan.date_created.asc())

    return self.paginate(queryset)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.options(
        joinedload('author'),
    ).filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).first()
    if not revision:
        return '', 404

    build_query = Build.query.options(
        joinedload('author'),
        contains_eager('source').joinedload('revision'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Build.project_id == project.id,
        Source.revision_sha == revision.sha,
        Source.patch == None,  # NOQA
    ).order_by(Build.date_created.desc())

    return self.paginate(build_query)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    args = self.get_parser.parse_args()
    num_results = args.per_page

    # restrict the join to the last N jobs otherwise this can get
    # significantly expensive as we have to seek quite a ways
    job_sq = Job.query.filter(
        Job.status == Status.finished,
        Job.project_id == project_id,
    ).order_by(Job.date_created.desc()).limit(num_results * 10).subquery()

    recent_runs = list(TestCase.query.options(
        contains_eager('job', alias=job_sq),
        contains_eager('job.source'),
        joinedload('job.build'),
        joinedload('job.build.author'),
        joinedload('job.build.source'),
        joinedload('job.build.source.revision'),
    ).join(
        job_sq, TestCase.job_id == job_sq.c.id,
    ).join(
        Source, job_sq.c.source_id == Source.id,
    ).filter(
        Source.repository_id == project.repository_id,
        Source.patch_id == None,  # NOQA
        Source.revision_sha != None,  # NOQA
        TestCase.name_sha == test.name_sha,
    ).order_by(job_sq.c.date_created.desc()))

    jobs = set(r.job for r in recent_runs)
    builds = set(j.build for j in jobs)

    serialized_jobs = dict(zip(jobs, self.serialize(jobs)))
    serialized_builds = dict(zip(builds, self.serialize(builds)))

    results = []
    for recent_run, s_recent_run in zip(recent_runs, self.serialize(recent_runs)):
        s_recent_run['job'] = serialized_jobs[recent_run.job]
        s_recent_run['job']['build'] = serialized_builds[recent_run.job.build]
        results.append(s_recent_run)

    return self.paginate(results, serialize=False)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    query = request.args.get('q', request.args.get('query'))
    source = request.args.get('source')

    filters = []

    if source:
        filters.append(Build.target.startswith(source))

    if query:
        filters.append(or_(
            Build.label.contains(query),
            Build.target.startswith(query),
        ))

    queryset = Build.query.options(
        joinedload('project', innerjoin=True),
        joinedload('author'),
        joinedload('source'),
    ).filter(
        Build.project_id == project.id,
        *filters
    ).order_by(Build.date_created.desc())

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    filters = []

    if args.source:
        filters.append(Build.target.startswith(args.source))

    if args.query:
        filters.append(or_(
            Build.label.contains(args.query),
            Build.target.startswith(args.query),
        ))

    if args.result:
        filters.append(Build.result == Result[args.result])

    queryset = Build.query.options(
        joinedload('project', innerjoin=True),
        joinedload('author'),
        joinedload('source'),
    ).filter(
        Build.project_id == project.id,
        *filters
    ).order_by(Build.date_created.desc())

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    filters = []

    if args.source:
        filters.append(Build.target.startswith(args.source))

    if args.query:
        filters.append(or_(
            Build.label.contains(args.query),
            Build.target.startswith(args.query),
        ))

    if args.result:
        filters.append(Build.result == Result[args.result])

    queryset = Build.query.options(
        joinedload('project', innerjoin=True),
        joinedload('author'),
        joinedload('source').joinedload('revision'),
    ).filter(
        Build.project_id == project.id,
        *filters
    ).order_by(Build.date_created.desc())

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return error('project not found', http_code=404)

    args = self.get_parser.parse_args()

    # we want to only return commits in the repo that are within the
    # project's whitelist
    paths = None
    if not args.every_commit:
        paths = self.get_whitelisted_paths(project)

    repo = project.repository
    offset = (args.page - 1) * args.per_page
    limit = args.per_page + 1  # +1 to tell if there are more revs to get

    vcs = repo.get_vcs()
    if vcs:
        try:
            commits = self.get_commits_from_vcs(
                repo, vcs, offset, limit, paths, args.parent, args.branch)
        except ValueError as err:
            return error(err.message)
    else:
        if args.parent or args.branch:
            param = 'Branches' if args.branch else 'Parents'
            return error(
                '{0} not supported for projects with no repository.'.format(param),
                http_code=422)
        # TODO: right now this fallback returns every commit for projects
        # with whitelisted paths. At the very least, we need to tell the
        # frontend about this (perhaps using a response header)
        commits = self.get_commits_from_db(repo, offset, limit)

    page_links = self.make_links(
        current_page=args.page,
        has_next_page=len(commits) > args.per_page,
    )

    # we fetched one extra commit so that we'd know whether to create a
    # next link. Delete it
    commits = commits[:args.per_page]

    builds_map = {}
    if commits:
        builds_map = self.get_builds_for_commits(
            commits, project, args.all_builds)

    results = []
    for result in commits:
        if args.all_builds:
            result['builds'] = builds_map.get(result['id'], [])
        else:
            result['build'] = builds_map.get(result['id'])
        results.append(result)

    return self.respond(results, serialize=False, links=page_links)
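# A minimal sketch of the whitelist lookup assumed above. The whitelisted
# paths presumably come from a per-project option; the option name
# 'build.file-whitelist' and its newline-separated format are assumptions
# for illustration, not a confirmed part of this codebase.
def get_whitelisted_paths(self, project):
    option = ProjectOption.query.filter(
        ProjectOption.project_id == project.id,
        ProjectOption.name == 'build.file-whitelist',  # assumed option name
    ).first()
    if not option or not option.value:
        return None
    # one path prefix per line; ignore blank lines
    return [line.strip() for line in option.value.splitlines() if line.strip()]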
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    queryset = Snapshot.query.filter(
        Snapshot.project_id == project.id,
    )

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    plans = Plan.query.options(
        subqueryload_all(Plan.steps),
    ).filter(
        Plan.projects.contains(project),
    )

    last_build = Build.query.options(
        joinedload('author'),
        contains_eager('source'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project == project,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).first()

    if not last_build or last_build.result == Result.passed:
        last_passing_build = last_build
    else:
        last_passing_build = Build.query.options(
            joinedload('author'),
            contains_eager('source'),
        ).join(
            Source, Build.source_id == Source.id,
        ).filter(
            Source.patch_id == None,  # NOQA
            Build.project == project,
            Build.result == Result.passed,
            Build.status == Status.finished,
        ).order_by(
            Build.date_created.desc(),
        ).first()

    options = dict(
        (o.name, o.value) for o in ProjectOption.query.filter(
            ProjectOption.project_id == project.id,
        )
    )
    for key, value in OPTION_DEFAULTS.iteritems():
        options.setdefault(key, value)

    data = self.serialize(project)
    data['lastBuild'] = last_build
    data['lastPassingBuild'] = last_passing_build
    data['repository'] = project.repository
    data['plans'] = list(plans)
    data['options'] = options
    data['stats'] = self._get_stats(project)

    return self.respond(data)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    # restrict the join to the last 1000 jobs otherwise this can get
    # significantly expensive as we have to seek quite a ways
    job_sq = Job.query.filter(
        Job.status == Status.finished,
        Job.project_id == project_id,
    ).order_by(Job.date_created.desc()).limit(1000).subquery()

    recent_runs = list(TestCase.query.options(
        contains_eager('job', alias=job_sq),
        contains_eager('job.source'),
        joinedload('job', 'build'),
    ).join(
        job_sq, TestCase.job_id == job_sq.c.id,
    ).join(
        Source, job_sq.c.source_id == Source.id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Source.revision_sha != None,  # NOQA
        TestCase.name_sha == test.name_sha,
    ).order_by(job_sq.c.date_created.desc())[:25])

    first_build = Build.query.join(
        Job, Job.build_id == Build.id,
    ).join(
        TestCase, TestCase.job_id == Job.id,
    ).filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.asc()).limit(1).first()

    extended_serializers = {
        TestCase: TestCaseWithJobSerializer(),
        Job: JobWithBuildSerializer(),
    }

    context = self.serialize(test, {
        TestCase: GeneralizedTestCase(),
    })
    context.update({
        'results': self.serialize(recent_runs, extended_serializers),
        'firstBuild': first_build,
    })

    return self.respond(context, serialize=False)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return error('project not found', http_code=404)

    args = self.get_parser.parse_args()

    # we want to only return commits in the repo that are within the
    # project's whitelist
    paths = None
    if not args.every_commit:
        paths = self.get_whitelisted_paths(project)

    repo = project.repository
    offset = (args.page - 1) * args.per_page
    limit = args.per_page + 1  # +1 to tell if there are more revs to get

    vcs = repo.get_vcs()
    if vcs:
        try:
            commits = self.get_commits_from_vcs(
                repo, vcs, offset, limit, paths, args.parent, args.branch)
        except ValueError as err:
            return error(err.message)
    else:
        if args.parent or args.branch:
            param = 'Branches' if args.branch else 'Parents'
            return error(
                '{0} not supported for projects with no repository.'.format(param),
                http_code=422)
        # TODO: right now this fallback returns every commit for projects
        # with whitelisted paths. At the very least, we need to tell the
        # frontend about this (perhaps using a response header)
        commits = self.get_commits_from_db(repo, offset, limit)

    page_links = self.make_links(
        current_page=args.page,
        has_next_page=len(commits) > args.per_page,
    )

    # we fetched one extra commit so that we'd know whether to create a
    # next link. Delete it
    commits = commits[:args.per_page]

    builds_map = {}
    if commits:
        builds_map = self.get_builds_for_commits(
            commits, project, args.all_builds)

    results = []
    for result in commits:
        if args.all_builds:
            result['builds'] = builds_map.get(result['id'], [])
        else:
            result['build'] = builds_map.get(result['id'])
        results.append(result)

    return self.respond(results, serialize=False, links=page_links)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    queryset = Snapshot.query.filter(
        Snapshot.project_id == project.id,
    )

    return self.paginate(queryset)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    args = self.get_parser.parse_args()
    num_results = args.per_page

    # restrict the join to the last N jobs otherwise this can get
    # significantly expensive as we have to seek quite a ways
    job_sq = Job.query.filter(
        Job.status == Status.finished,
        Job.project_id == project_id,
    ).order_by(Job.date_created.desc()).limit(num_results * 10).subquery()

    recent_runs = list(TestCase.query.options(
        contains_eager('job', alias=job_sq),
        contains_eager('job.source'),
        joinedload('job.build'),
        joinedload('job.build.author'),
        joinedload('job.build.source'),
        joinedload('job.build.source.revision'),
    ).join(
        job_sq, TestCase.job_id == job_sq.c.id,
    ).join(
        Source, job_sq.c.source_id == Source.id,
    ).filter(
        Source.repository_id == project.repository_id,
        Source.patch_id == None,  # NOQA
        Source.revision_sha != None,  # NOQA
        TestCase.name_sha == test.name_sha,
    ).order_by(job_sq.c.date_created.desc())[:num_results])

    jobs = set(r.job for r in recent_runs)
    builds = set(j.build for j in jobs)

    serialized_jobs = dict(zip(jobs, self.serialize(jobs)))
    serialized_builds = dict(zip(builds, self.serialize(builds)))

    results = []
    for recent_run, s_recent_run in zip(recent_runs, self.serialize(recent_runs)):
        s_recent_run['job'] = serialized_jobs[recent_run.job]
        s_recent_run['job']['build'] = serialized_builds[recent_run.job.build]
        results.append(s_recent_run)

    return self.respond(results, serialize=False)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    latest_build = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project_id == project.id,
        Build.result == Result.passed,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).limit(1).first()

    if not latest_build:
        return self.respond([])

    # a Query object is always truthy, so materialize the job ids before
    # checking whether the build has any jobs
    job_ids = [j.id for j in db.session.query(Job.id).filter(
        Job.build_id == latest_build.id,
    )]
    if not job_ids:
        return self.respond([])

    # use the most recent test
    test_list = TestCase.query.filter(
        TestCase.project_id == project.id,
        TestCase.job_id.in_(job_ids),
    )

    if args.min_duration:
        test_list = test_list.filter(
            TestCase.duration >= args.min_duration,
        )

    if args.query:
        test_list = test_list.filter(
            TestCase.name.contains(args.query),
        )

    if args.sort == 'duration':
        sort_by = TestCase.duration.desc()
    elif args.sort == 'name':
        sort_by = TestCase.name.asc()

    test_list = test_list.order_by(sort_by)

    return self.paginate(test_list, serializers={
        TestCase: GeneralizedTestCase(),
    })
def post(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    args = self.post_parser.parse_args()

    plan = Plan(
        label=args.name,
        project_id=project.id,
    )
    db.session.add(plan)

    return self.respond(plan)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    queryset = Snapshot.query.options(
        joinedload('source').joinedload('revision'),
    ).filter(
        Snapshot.project_id == project.id,
    ).order_by(
        Snapshot.date_created.desc(),
    )

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    latest_build = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project_id == project.id,
        Build.result == Result.passed,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).limit(1).first()

    if not latest_build:
        return self.respond([])

    job_list = db.session.query(Job.id).filter(
        Job.build_id == latest_build.id,
    )

    # use the most recent test
    test_list = TestCase.query.filter(
        TestCase.project_id == project.id,
        TestCase.job_id.in_(job_list),
    )

    if args.min_duration:
        test_list = test_list.filter(
            TestCase.duration >= args.min_duration,
        )

    if args.query:
        test_list = test_list.filter(
            TestCase.name.contains(args.query),
        )

    if args.sort == 'duration':
        sort_by = TestCase.duration.desc()
    elif args.sort == 'name':
        sort_by = TestCase.name.asc()

    test_list = test_list.order_by(sort_by)

    return self.paginate(test_list, serializers={
        TestCase: GeneralizedTestCase(),
    })
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    queryset = Plan.query.options(
        subqueryload_all(Plan.steps),
    ).filter(
        Plan.projects.contains(project),
    ).order_by(
        Plan.label.asc(),
    )

    return self.paginate(queryset)
def post(self, project_id):
    """Initiates a new snapshot for this project."""
    project = Project.get(project_id)
    if not project:
        return '', 404

    # TODO(adegtiar): initialize a snapshot build.
    snapshot = Snapshot(
        project_id=project.id,
    )
    db.session.add(snapshot)
    db.session.commit()

    # TODO(adegtiar): execute the build.
    return self.respond(snapshot)
def post(self, project_id):
    """Initiates a new snapshot for this project."""
    project = Project.get(project_id)
    if not project:
        return '', 404

    # TODO(adegtiar): initialize a snapshot build.
    snapshot = Snapshot(
        project_id=project.id,
    )
    db.session.add(snapshot)
    db.session.commit()

    # TODO(adegtiar): execute the build.
    return self.respond(snapshot)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    latest_build = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project_id == project.id,
        Build.result == Result.passed,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).limit(1).first()

    if not latest_build:
        return self.respond([])

    # use the most recent coverage
    cover_list = FileCoverage.query.filter(
        FileCoverage.job_id.in_(
            db.session.query(Job.id).filter(Job.build_id == latest_build.id)
        )
    )

    if args.query:
        cover_list = cover_list.filter(
            FileCoverage.filename.startswith(args.query),
        )

    if args.sort == 'lines_covered':
        sort_by = FileCoverage.lines_covered.desc()
    # the second branch previously repeated 'lines_covered', making it
    # unreachable; it clearly means to sort by uncovered lines
    elif args.sort == 'lines_uncovered':
        sort_by = FileCoverage.lines_uncovered.desc()
    elif args.sort == 'name':
        sort_by = FileCoverage.filename.asc()

    cover_list = cover_list.order_by(sort_by)

    return self.paginate(cover_list, serializers={
        FileCoverage: GeneralizedFileCoverage(),
    })
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    latest_job = Job.query.options(
        subqueryload(Job.project),
    ).join(
        Source, Source.id == Job.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Job.project == project,
        Job.result == Result.passed,
        Job.status == Status.finished,
    ).order_by(
        Job.date_created.desc(),
    ).limit(1).first()

    if latest_job:
        test_list = db.session.query(AggregateTestGroup, TestGroup).options(
            subqueryload(AggregateTestGroup.first_job),
            subqueryload(AggregateTestGroup.parent),
            subqueryload(TestGroup.parent),
        ).join(
            TestGroup, and_(
                TestGroup.job_id == latest_job.id,
                TestGroup.name_sha == AggregateTestGroup.name_sha,
            )
        ).filter(
            AggregateTestGroup.parent_id == None,  # NOQA: we have to use == here
            AggregateTestGroup.project_id == project.id,
        ).order_by(TestGroup.duration.desc())

        results = []
        for agg, group in test_list:
            agg.last_testgroup = group
            results.append(agg)
    else:
        results = []

    context = {
        'tests': results,
    }

    return self.respond(context)
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    args = self.get_parser.parse_args()

    return self.paginate(
        HistorySliceable(project_id, args.branch, test,
                         project.repository_id, self.serialize),
        serialize=False,
    )
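# A minimal sketch of the HistorySliceable object used above. paginate()
# only needs slicing support, so the object can run a windowed query lazily
# when a page is requested. The constructor signature matches the call
# sites; the query body is an illustrative assumption (the real class
# presumably also filters by branch and repository).
class HistorySliceable(object):
    def __init__(self, project_id, branch, test, repository_id, serialize):
        self.project_id = project_id
        self.branch = branch
        self.test = test
        self.repository_id = repository_id
        self.serialize = serialize

    def __getitem__(self, sl):
        # fetch only the requested window of runs for this test, newest first
        runs = TestCase.query.filter(
            TestCase.project_id == self.project_id,
            TestCase.name_sha == self.test.name_sha,
        ).order_by(TestCase.date_created.desc())[sl.start:sl.stop]
        return self.serialize(list(runs))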
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    latest_build = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project_id == project.id,
        Build.result == Result.passed,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).limit(1).first()

    if not latest_build:
        return self.respond([])

    # use the most recent coverage
    cover_list = FileCoverage.query.filter(
        FileCoverage.job_id.in_(
            db.session.query(Job.id).filter(Job.build_id == latest_build.id)
        )
    )

    if args.query:
        cover_list = cover_list.filter(
            FileCoverage.filename.startswith(args.query),
        )

    if args.sort == 'lines_covered':
        sort_by = FileCoverage.lines_covered.desc()
    # the second branch previously repeated 'lines_covered', making it
    # unreachable; it clearly means to sort by uncovered lines
    elif args.sort == 'lines_uncovered':
        sort_by = FileCoverage.lines_uncovered.desc()
    elif args.sort == 'name':
        sort_by = FileCoverage.filename.asc()

    cover_list = cover_list.order_by(sort_by)

    return self.paginate(cover_list, serializers={
        FileCoverage: GeneralizedFileCoverage(),
    })
def get(self, project_id, test_hash):
    project = Project.get(project_id)
    if not project:
        return '', 404

    # use the most recent test run to find basic details
    test = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.name_sha == test_hash,
    ).order_by(TestCase.date_created.desc()).limit(1).first()
    if not test:
        return '', 404

    args = self.get_parser.parse_args()

    return self.paginate(
        HistorySliceable(project_id, args.branch, test,
                         project.repository_id, self.serialize),
        serialize=False,
    )
def get(self, project_id, source_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    source = Source.query.filter(
        Source.id == source_id,
        Source.repository_id == repo.id,
    ).first()
    if source is None:
        return '', 404

    build_query = Build.query.options(
        joinedload('author'),
    ).filter(
        Build.project_id == project.id,
        Build.source_id == source.id,
    ).order_by(Build.date_created.desc())

    return self.paginate(build_query)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    filters = []

    if args.authors:
        filters.append(Build.author_id.in_([a.id for a in args.authors]))
    elif args.authors is not None:
        return []

    if args.source:
        filters.append(Build.target.startswith(args.source))

    if args.query:
        filters.append(or_(
            Build.label.contains(args.query),
            Build.target.startswith(args.query),
        ))

    if args.result:
        filters.append(Build.result == Result[args.result])

    if args.patches_only:
        filters.append(Source.patch_id != None)  # NOQA
    elif not args.include_patches:
        filters.append(Source.patch_id == None)  # NOQA

    queryset = Build.query.options(
        joinedload('project', innerjoin=True),
        joinedload('author'),
        contains_eager('source').joinedload('revision'),
    ).join(
        Source, Source.id == Build.source_id,
    ).filter(
        Build.project_id == project.id,
        *filters
    ).order_by(Build.date_created.desc())

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    latest_job = Job.query.join(
        Source, Source.id == Job.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Job.project_id == project.id,
        Job.result == Result.passed,
        Job.status == Status.finished,
    ).order_by(
        Job.date_created.desc(),
    ).limit(1).first()

    if not latest_job:
        return self.respond([])

    # use the most recent test
    test_list = TestCase.query.filter(
        TestCase.project_id == project_id,
        TestCase.job_id == latest_job.id,
    )

    if args.query:
        test_list = test_list.filter(
            TestCase.name.startswith(args.query),
        )

    if args.sort == 'duration':
        sort_by = TestCase.duration.desc()
    elif args.sort == 'name':
        sort_by = TestCase.name.asc()

    test_list = test_list.order_by(sort_by)

    return self.paginate(test_list, serializers={
        TestCase: GeneralizedTestCase(),
    })
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).join(Revision.author).first()
    if not revision:
        return '', 404

    context = self.serialize(revision)
    context.update({
        'repository': repo,
    })

    return self.respond(context)
def get(self, project_id, source_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    source = Source.query.filter(
        Source.id == source_id,
        Source.repository_id == repo.id,
    ).first()
    if source is None:
        return '', 404

    build_list = list(Build.query.options(
        joinedload('author'),
    ).filter(
        Build.source_id == source.id,
    ).order_by(Build.date_created.desc()))[:100]

    return self.respond(build_list)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    include_patches = request.args.get('include_patches') or '1'

    queryset = Build.query.options(
        joinedload('project'),
        joinedload('author'),
        joinedload('source').joinedload('revision'),
    ).filter(
        Build.project_id == project.id,
    ).order_by(Build.date_created.desc())

    if include_patches == '0':
        queryset = queryset.filter(
            Build.patch == None,  # NOQA
        )

    return self.paginate(queryset)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    revision = Revision.query.options(
        joinedload('author'),
    ).filter(
        Revision.repository_id == repo.id,
        Revision.sha == commit_id,
    ).first()
    if not revision:
        return '', 404

    context = self.serialize(revision)
    context.update({
        'repository': repo,
    })

    return self.respond(context)
def get(self, project_id, source_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    source = Source.query.filter(
        Source.id == source_id,
        Source.repository_id == repo.id,
    ).first()
    if source is None:
        return '', 404

    context = self.serialize(source)

    diff = source.generate_diff()

    if diff:
        files = self._get_files_from_raw_diff(diff)

        coverage = {
            c.filename: c.data
            for c in get_coverage_by_source_id(source_id)
            if c.filename in files
        }

        coverage_for_added_lines = self._filter_coverage_for_added_lines(
            diff, coverage)

        tails_info = dict(source.data)
    else:
        coverage = None
        coverage_for_added_lines = None
        tails_info = None

    context['diff'] = diff
    context['coverage'] = coverage
    context['coverageForAddedLines'] = coverage_for_added_lines
    context['tailsInfo'] = tails_info

    return self.respond(context)
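# A minimal sketch of the diff-filename helper assumed above, based on the
# standard unified diff format: each changed file contributes a
# "+++ b/<path>" header line. This illustrates the idea only; the project's
# actual parser may use a dedicated diff library and handle renames,
# deletions, and /dev/null targets.
def _get_files_from_raw_diff(self, diff):
    files = set()
    for line in diff.splitlines():
        if line.startswith('+++ b/'):
            # strip the "+++ b/" prefix to recover the post-image path
            files.add(line[len('+++ b/'):].strip())
    return files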
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    include_patches = request.args.get('include_patches') or '1'

    queryset = Build.query.options(
        joinedload('project'),
        joinedload('author'),
        joinedload('source').joinedload('revision'),
    ).filter(
        Build.project_id == project.id,
    ).order_by(Build.date_created.desc())

    if include_patches == '0':
        queryset = queryset.filter(
            Build.patch == None,  # NOQA
        )

    return self.paginate(queryset)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    if args.date:
        try:
            query_date = datetime.strptime(args.date, '%Y-%m-%d').date()
        except ValueError:
            # strptime raises ValueError on a malformed date; catching it
            # narrowly avoids swallowing unrelated exceptions
            return 'Can\'t parse date "%s"' % (args.date,), 500
    else:
        query_date = date.today() - timedelta(days=1)

    data = {
        'date': str(query_date),
        'chartData': self.get_chart_data(project_id, query_date),
        'flakyTests': self.get_flaky_tests(project_id, query_date),
    }

    return self.respond(data, serialize=False)
def post(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    args = self.post_parser.parse_args()

    if args.name:
        project.name = args.name

    if args.slug:
        match = Project.query.filter(
            Project.slug == args.slug,
            Project.id != project.id,
        ).first()
        if match:
            return '{"error": "Project with slug %r already exists"}' % (
                args.slug,
            ), 400

        project.slug = args.slug

    if args.repository:
        repository = Repository.get(args.repository)
        if repository is None:
            return '{"error": "Repository with url %r does not exist"}' % (
                args.repository,
            ), 400

        project.repository = repository

    if args.status == 'inactive':
        project.status = ProjectStatus.inactive
    elif args.status == 'active':
        project.status = ProjectStatus.active

    db.session.add(project)

    data = self.serialize(project)
    data['repository'] = self.serialize(project.repository)

    return self.respond(data, serialize=False)
def post(self, project_id):
    project = Project.get(project_id)
    if project is None:
        return '', 404

    validator = ProjectValidator(
        data=request.form,
        initial={
            'name': project.name,
            'slug': project.slug,
        },
    )
    try:
        result = validator.clean()
    except ValidationError:
        return '', 400

    project.name = result['name']
    project.slug = result['slug']
    db.session.add(project)

    return self.respond(project)
def get(self, project_id, commit_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository

    try:
        revision = Revision.get_by_sha_prefix_query(
            repo.id,
            commit_id,
        ).options(
            joinedload('author')
        ).scalar()
    except MultipleResultsFound:
        return '', 404
    else:
        if not revision:
            return '', 404

    context = self.serialize(revision)
    context.update({
        'repository': repo,
    })

    return self.respond(context)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    repo = project.repository
    vcs = repo.get_vcs()

    if vcs:
        vcs_log = list(vcs.log())

        if vcs_log:
            revisions_qs = list(Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
                Revision.sha.in_(c.id for c in vcs_log),
            ))
            revisions_map = dict(
                (c.sha, d)
                for c, d in itertools.izip(revisions_qs, self.serialize(revisions_qs))
            )

            commits = []
            for commit in vcs_log:
                if commit.id in revisions_map:
                    result = revisions_map[commit.id]
                else:
                    result = self.serialize(commit)
                commits.append(result)
        else:
            commits = []
    else:
        commits = self.serialize(list(
            Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
            ).order_by(Revision.date_created.desc())[:100]
        ))

    if commits:
        builds_qs = list(Build.query.options(
            joinedload('author'),
            contains_eager('source'),
        ).join(
            Source, Source.id == Build.source_id,
        ).filter(
            Build.source_id == Source.id,
            Build.project_id == project.id,
            Build.status.in_([Status.finished, Status.in_progress, Status.queued]),
            Source.revision_sha.in_(c['id'] for c in commits),
            Source.patch == None,  # NOQA
        ).order_by(Build.date_created.asc()))

        builds_map = dict(
            (b.source.revision_sha, d)
            for b, d in itertools.izip(builds_qs, self.serialize(builds_qs))
        )
    else:
        builds_map = {}

    results = []
    for result in commits:
        result['build'] = builds_map.get(result['id'])
        results.append(result)

    return self.respond(results)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    repo = project.repository
    vcs = repo.get_vcs()

    offset = (args.page - 1) * args.per_page
    limit = args.per_page + 1

    if vcs:
        vcs_log = list(vcs.log(
            offset=offset,
            limit=limit,
            parent=args.parent,
        ))

        if vcs_log:
            revisions_qs = list(Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
                Revision.sha.in_(c.id for c in vcs_log),
            ))
            revisions_map = dict(
                (c.sha, d)
                for c, d in itertools.izip(revisions_qs, self.serialize(revisions_qs))
            )

            commits = []
            for commit in vcs_log:
                if commit.id in revisions_map:
                    result = revisions_map[commit.id]
                else:
                    result = self.serialize(commit)
                commits.append(result)
        else:
            commits = []
    elif args.parent:
        return '{"error": "Parent argument not supported"}', 400
    else:
        commits = self.serialize(list(
            Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
            ).order_by(Revision.date_created.desc())[offset:offset + limit]
        ))

    page_links = self.make_links(
        current_page=args.page,
        has_next_page=len(commits) > args.per_page,
    )

    commits = commits[:args.per_page]

    if commits:
        builds_qs = list(Build.query.options(
            joinedload('author'),
            contains_eager('source'),
        ).join(
            Source, Source.id == Build.source_id,
        ).filter(
            Build.source_id == Source.id,
            Build.project_id == project.id,
            Build.status.in_([Status.finished, Status.in_progress, Status.queued]),
            Source.revision_sha.in_(c['id'] for c in commits),
            Source.patch == None,  # NOQA
        ).order_by(Build.date_created.asc()))

        builds_map = dict(
            (b.source.revision_sha, d)
            for b, d in itertools.izip(builds_qs, self.serialize(builds_qs))
        )
    else:
        builds_map = {}

    results = []
    for result in commits:
        result['build'] = builds_map.get(result['id'])
        results.append(result)

    return self.respond(results, links=page_links)
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.parser.parse_args()

    points = args.points or POINTS_DEFAULT[args.resolution]

    if args.from_date:
        date_end = datetime.fromtimestamp(args.from_date)
    else:
        date_end = datetime.now()

    date_end = date_end.replace(minute=0, second=0, microsecond=0)

    if args.resolution == '1h':
        grouper = func.date_trunc('hour', Build.date_created)
        decr_res = lambda x: x - timedelta(hours=1)
    elif args.resolution == '1d':
        grouper = func.date_trunc('day', Build.date_created)
        date_end = date_end.replace(hour=0)
        decr_res = lambda x: x - timedelta(days=1)
    elif args.resolution == '1w':
        grouper = func.date_trunc('week', Build.date_created)
        date_end = date_end.replace(hour=0)
        date_end -= timedelta(days=date_end.weekday())
        decr_res = decr_week
    elif args.resolution == '1m':
        grouper = func.date_trunc('month', Build.date_created)
        date_end = date_end.replace(hour=0, day=1)
        decr_res = decr_month

    if args.agg:
        value = getattr(func, args.agg)(ItemStat.value)
    else:
        value = func.avg(ItemStat.value)

    date_begin = date_end.replace()
    for _ in xrange(points):
        date_begin = decr_res(date_begin)

    # TODO(dcramer): put minimum date bounds
    results = dict(db.session.query(
        grouper.label('grouper'),
        value.label('value'),
    ).filter(
        ItemStat.item_id == Build.id,
        ItemStat.name == args.stat,
        Build.project_id == project.id,
        Build.date_created >= date_begin,
        Build.date_created < date_end,
    ).group_by('grouper'))

    data = []
    cur_date = date_end.replace()
    for _ in xrange(points):
        cur_date = decr_res(cur_date)
        data.append({
            'time': int(float(cur_date.strftime('%s.%f')) * 1000),
            'value': int(float(results.get(cur_date, 0))),
        })
    data.reverse()

    return self.respond(data, serialize=False)
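# A minimal sketch of the two resolution-step helpers referenced above
# (decr_week / decr_month). Stepping a week back is plain timedelta math;
# stepping a month back needs a year rollover at January. These bodies are
# assumptions consistent with how the function uses them, not confirmed
# source.
from datetime import timedelta


def decr_week(dt):
    # move exactly one week back
    return dt - timedelta(days=7)


def decr_month(dt):
    # move one month back, rolling the year over at January; safe here
    # because callers normalize to day=1 before using monthly resolution
    if dt.month == 1:
        return dt.replace(year=dt.year - 1, month=12)
    return dt.replace(month=dt.month - 1)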
def get(self, project_id):
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.get_parser.parse_args()

    repo = project.repository
    vcs = repo.get_vcs()

    offset = (args.page - 1) * args.per_page
    limit = args.per_page + 1

    if vcs:
        try:
            vcs_log = list(vcs.log(
                offset=offset,
                limit=limit,
                parent=args.parent,
                branch=args.branch,
            ))
        except ValueError as err:
            return error(err.message)

        if vcs_log:
            revisions_qs = list(Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
                Revision.sha.in_(c.id for c in vcs_log),
            ))
            revisions_map = dict(
                (c.sha, d)
                for c, d in itertools.izip(revisions_qs, self.serialize(revisions_qs))
            )

            commits = []
            for commit in vcs_log:
                if commit.id in revisions_map:
                    result = revisions_map[commit.id]
                else:
                    result = self.serialize(commit)
                commits.append(result)
        else:
            commits = []
    elif args.parent or args.branch:
        param = 'Branches' if args.branch else 'Parents'
        return error(
            '{0} not supported for projects with no repository.'.format(param),
            http_code=422)
    else:
        commits = self.serialize(list(
            Revision.query.options(
                joinedload('author'),
            ).filter(
                Revision.repository_id == repo.id,
            ).order_by(Revision.date_created.desc())[offset:offset + limit]
        ))

    page_links = self.make_links(
        current_page=args.page,
        has_next_page=len(commits) > args.per_page,
    )

    commits = commits[:args.per_page]

    if commits:
        builds_qs = list(Build.query.options(
            joinedload('author'),
            contains_eager('source'),
        ).join(
            Source, Source.id == Build.source_id,
        ).filter(
            Build.source_id == Source.id,
            Build.project_id == project.id,
            Build.status.in_([Status.finished, Status.in_progress, Status.queued]),
            Build.cause != Cause.snapshot,
            Source.repository_id == project.repository_id,
            Source.revision_sha.in_(c['id'] for c in commits),
            Source.patch == None,  # NOQA
        ).order_by(Build.date_created.asc()))

        builds_map = dict(
            (b.source.revision_sha, d)
            for b, d in itertools.izip(builds_qs, self.serialize(builds_qs))
        )
    else:
        builds_map = {}

    results = []
    for result in commits:
        result['build'] = builds_map.get(result['id'])
        results.append(result)

    return self.respond(results, serialize=False, links=page_links)
def get_stream_channels(self, project_id=None):
    project = Project.get(project_id)
    if not project:
        return Response(status=404)

    return ['projects:{0}:builds'.format(project.id.hex)]