def run(self):
    revision = self.revision

    project_list = self.get_project_list()
    if not project_list:
        return

    options = ProjectOptionsHelper.get_options(project_list, [
        'build.branch-names',
        'build.commit-trigger',
        'build.file-whitelist',
    ])

    if any(o.get('build.file-whitelist') for o in options.values()):
        files_changed = self.get_changed_files()
    else:
        files_changed = None

    projects_to_build = []
    for project in project_list:
        if options[project.id].get('build.commit-trigger', '1') != '1':
            self.logger.info('build.commit-trigger is disabled for project %s', project.slug)
            continue

        branch_names = filter(bool, options[project.id].get('build.branch-names', '*').split(' '))
        if not self.should_build_branch(branch_names):
            self.logger.info('No branches matched build.branch-names for project %s', project.slug)
            continue

        if not in_project_files_whitelist(options[project.id], files_changed):
            self.logger.info('No changed files matched build.file-whitelist for project %s', project.slug)
            continue

        projects_to_build.append(project.slug)

    for project_slug in projects_to_build:
        data = {
            'sha': revision.sha,
            'project': project_slug,
            'tag': 'commit',
        }
        with current_app.test_request_context('/api/0/builds/', method='POST', data=data):
            try:
                response = BuildIndexAPIView().post()
            except Exception as e:
                self.logger.exception('Failed to create build: %s' % (e,))
            else:
                if isinstance(response, (list, tuple)):
                    response, status = response
                    if status != 200:
                        self.logger.error('Failed to create build: %s' % (response,), extra={
                            'data': data,
                        })
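# The branch check above delegates to should_build_branch(), which is defined
# elsewhere in the project. A minimal sketch of that kind of filter, assuming
# 'build.branch-names' holds space-separated glob patterns (e.g. "master release/*")
# matched against the revision's branch names, could look like the hypothetical
# helper below. Illustration only; not the project's actual implementation.
import fnmatch


def matches_branch_patterns(revision_branches, allowed_patterns):
    # No branch information available: stay permissive, mirroring the '*'
    # default used above (assumption).
    if not revision_branches:
        return True
    return any(
        fnmatch.fnmatch(branch, pattern)
        for branch in revision_branches
        for pattern in allowed_patterns
    )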
def _get_projects_for_diff(self, diff, files_changed):
    projects = list(Project.query.options(
        subqueryload_all('plans'),
    ).filter(
        Project.status == ProjectStatus.active,
        Project.repository_id == diff.source.repository_id,
    ))

    project_options = ProjectOptionsHelper.get_options(
        projects, ['build.file-whitelist', 'phabricator.diff-trigger'])

    projects = [
        x for x in projects
        if get_build_plans(x)
        and project_options[x.id].get('phabricator.diff-trigger', '1') == '1'
        and in_project_files_whitelist(project_options[x.id], files_changed)
    ]
    return projects
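# in_project_files_whitelist() is shared by the commit and diff triggers above.
# A plausible sketch, assuming 'build.file-whitelist' stores newline-separated
# glob patterns (an assumption; the real helper and storage format are defined
# elsewhere in the project):
import fnmatch


def files_in_whitelist(project_options, files_changed):
    whitelist = [
        pattern.strip()
        for pattern in (project_options.get('build.file-whitelist') or '').splitlines()
        if pattern.strip()
    ]
    # No whitelist configured: every change should trigger a build, which also
    # covers the files_changed=None case in the commit trigger above.
    if not whitelist:
        return True
    if not files_changed:
        return False
    return any(
        fnmatch.fnmatch(path, pattern)
        for path in files_changed
        for pattern in whitelist
    )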
def post(self):
    """
    Notify Changes of a newly created diff.

    Depending on system configuration, this may create 0 or more new builds,
    and the resulting response will be a list of those build objects.
    """
    args = self.parser.parse_args()

    repository = args.repository
    if not args.repository:
        return error("Repository not found")

    projects = list(Project.query.options(
        subqueryload_all('plans'),
    ).filter(
        Project.status == ProjectStatus.active,
        Project.repository_id == repository.id,
    ))

    # no projects bound to repository
    if not projects:
        return self.respond([])

    options = dict(db.session.query(
        ProjectOption.project_id, ProjectOption.value
    ).filter(
        ProjectOption.project_id.in_([p.id for p in projects]),
        ProjectOption.name.in_([
            'phabricator.diff-trigger',
        ])
    ))

    projects = [
        p for p in projects
        if options.get(p.id, '1') == '1'
    ]

    if not projects:
        return self.respond([])

    label = args.label[:128]
    author = args.author
    message = args.message
    sha = args.sha
    target = 'D{}'.format(args['phabricator.revisionID'])

    try:
        identify_revision(repository, sha)
    except MissingRevision:
        return error("Unable to find commit %s in %s." % (sha, repository.url),
                     problems=['sha', 'repository'])

    source_data = {
        'phabricator.buildTargetPHID': args['phabricator.buildTargetPHID'],
        'phabricator.diffID': args['phabricator.diffID'],
        'phabricator.revisionID': args['phabricator.revisionID'],
        'phabricator.revisionURL': args['phabricator.revisionURL'],
    }

    patch = Patch(
        repository=repository,
        parent_revision_sha=sha,
        diff=''.join(args.patch_file),
    )
    db.session.add(patch)

    source = Source(
        patch=patch,
        repository=repository,
        revision_sha=sha,
        data=source_data,
    )
    db.session.add(source)

    phabricatordiff = try_create(PhabricatorDiff, {
        'diff_id': args['phabricator.diffID'],
        'revision_id': args['phabricator.revisionID'],
        'url': args['phabricator.revisionURL'],
        'source': source,
    })
    if phabricatordiff is None:
        logging.error("Diff %s, Revision %s already exists",
                      args['phabricator.diffID'], args['phabricator.revisionID'])
        return error("Diff already exists within Changes")

    project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])
    diff_parser = DiffParser(patch.diff)
    files_changed = diff_parser.get_changed_files()

    collection_id = uuid.uuid4()
    builds = []
    for project in projects:
        plan_list = get_build_plans(project)
        if not plan_list:
            logging.warning('No plans defined for project %s', project.slug)
            continue

        if not in_project_files_whitelist(project_options[project.id], files_changed):
            logging.info('No changed files matched build.file-whitelist for project %s',
                         project.slug)
            continue

        builds.append(create_build(
            project=project,
            collection_id=collection_id,
            sha=sha,
            target=target,
            label=label,
            message=message,
            author=author,
            patch=patch,
        ))

    return self.respond(builds)
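# try_create() above returns None when the PhabricatorDiff row already exists,
# which is how duplicate diff notifications are rejected. A rough sketch of such
# a helper, assuming it leans on the table's uniqueness constraint inside a
# savepoint, might look like this (illustrative only; the project's real helper
# may differ). `db` is the same SQLAlchemy handle the view code above uses.
from sqlalchemy.exc import IntegrityError


def try_create_sketch(model, where):
    instance = model(**where)
    try:
        # Flush inside a savepoint so a uniqueness violation only rolls back
        # this insert, not the caller's transaction.
        with db.session.begin_nested():
            db.session.add(instance)
    except IntegrityError:
        return None
    return instance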
def post(self):
    """
    Note: If ``patch`` is specified ``sha`` is assumed to be the original
    base revision to apply the patch. It is **not** guaranteed to be the rev
    used to apply the patch. See ``find_green_parent_sha`` for the logic of
    identifying the correct revision.
    """
    args = self.parser.parse_args()

    if not (args.project or args.repository or args['repository[phabricator.callsign]']):
        return error("Project or repository must be specified",
                     problems=["project", "repository", "repository[phabricator.callsign]"])

    if args.patch_data:
        try:
            patch_data = json.loads(args.patch_data)
        except Exception:
            return error("Invalid patch data (must be JSON dict)",
                         problems=["patch[data]"])

        if not isinstance(patch_data, dict):
            return error("Invalid patch data (must be JSON dict)",
                         problems=["patch[data]"])
    else:
        patch_data = None

    if args.project:
        projects = [args.project]
        repository = Repository.query.get(args.project.repository_id)
    elif args.repository:
        repository = args.repository
        projects = list(Project.query.options(
            subqueryload_all('plans'),
        ).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repository.id,
        ))
    elif args['repository[phabricator.callsign]']:
        repository = args['repository[phabricator.callsign]']
        projects = list(Project.query.options(
            subqueryload_all('plans'),
        ).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repository.id,
        ))

    if not projects:
        return error("Unable to find project(s).")

    label = args.label
    author = args.author
    message = args.message
    tag = args.tag
    if not tag and args.patch_file:
        tag = 'patch'

    try:
        revision = identify_revision(repository, args.sha)
    except MissingRevision:
        # if the default fails, we absolutely can't continue and the
        # client should send a valid revision
        return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                     problems=['sha', 'repository'])

    if revision:
        if not author:
            author = revision.author
        if not label:
            label = revision.subject
        # only default the message if it's absolutely not set
        if message is None:
            message = revision.message
        sha = revision.sha
    else:
        sha = args.sha

    if not args.target:
        target = sha[:12]
    else:
        target = args.target[:128]

    if not label:
        if message:
            label = message.splitlines()[0]
        if not label:
            label = 'A homeless build'
    label = label[:128]

    if args.patch_file:
        fp = StringIO()
        for line in args.patch_file:
            fp.write(line)
        patch_file = fp
    else:
        patch_file = None

    if patch_file:
        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=patch_file.getvalue(),
        )
        db.session.add(patch)
    else:
        patch = None

    project_options = ProjectOptionsHelper.get_options(projects, ['build.file-whitelist'])

    if patch:
        diff_parser = DiffParser(patch.diff)
        files_changed = diff_parser.get_changed_files()
    else:
        files_changed = None

    collection_id = uuid.uuid4()
    builds = []
    for project in projects:
        plan_list = get_build_plans(project)
        if not plan_list:
            logging.warning('No plans defined for project %s', project.slug)
            continue

        forced_sha = sha
        # TODO(dcramer): find_green_parent_sha needs to take branch
        # into account
        # if patch_file:
        #     forced_sha = find_green_parent_sha(
        #         project=project,
        #         sha=sha,
        #     )

        if files_changed and not in_project_files_whitelist(project_options[project.id], files_changed):
            logging.info('No changed files matched build.file-whitelist for project %s',
                         project.slug)
            continue

        builds.append(create_build(
            project=project,
            collection_id=collection_id,
            sha=forced_sha,
            target=target,
            label=label,
            message=message,
            author=author,
            patch=patch,
            source_data=patch_data,
            tag=tag,
        ))

    return self.respond(builds)
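# For reference, the same endpoint the commit listener above drives internally
# ('/api/0/builds/') can be exercised over HTTP. Host, scheme and any
# authentication below are deployment-specific assumptions; the sha and
# project slug are placeholder values.
import requests

resp = requests.post(
    'https://changes.example.com/api/0/builds/',  # hypothetical host
    data={
        'sha': 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',  # commit to build (example)
        'project': 'my-project',                            # project slug (example)
        'tag': 'commit',
    },
)
resp.raise_for_status()
builds = resp.json()  # list of created build objects; may be empty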