def test_get_snapshot_image_given_snapshot(self):
    """Lookup honors the snapshot id passed in, rather than falling back
    to the project's 'snapshot.current' option."""
    project = self.create_project()
    plan = self.create_plan(project)
    requested = self.create_snapshot(project)
    current = self.create_snapshot(project)
    db.session.add(ProjectOption(
        project_id=project.id,
        name='snapshot.current',
        value=current.id.hex,
    ))
    requested_image = self.create_snapshot_image(requested, plan)
    current_image = self.create_snapshot_image(current, plan)

    # Each snapshot id resolves to its own image.
    assert SnapshotImage.get(plan, requested.id) == requested_image
    assert SnapshotImage.get(plan, current.id) == current_image
def test_get_snapshot_image_dependent(self):
    """A plan that delegates via snapshot_plan_id resolves to the
    delegate plan's image."""
    project = self.create_project()
    dependent_plan = self.create_plan(project)
    base_plan = self.create_plan(project)
    dependent_plan.snapshot_plan_id = base_plan.id
    snapshot = self.create_snapshot(project)
    db.session.add(ProjectOption(
        project_id=project.id,
        name='snapshot.current',
        value=snapshot.id.hex,
    ))
    # The dependent plan's own image exists but must be ignored.
    dependent_image = self.create_snapshot_image(snapshot, dependent_plan)
    base_image = self.create_snapshot_image(snapshot, base_plan)

    assert SnapshotImage.get(dependent_plan, snapshot.id) == base_image
    assert SnapshotImage.get(base_plan, snapshot.id) == base_image
def build_jobplan(cls, plan, job, snapshot_id=None):
    """Creates and returns a jobplan.

    Unless a snapshot_id is given, no snapshot will be used. This differs
    from the build index endpoint where the default is the current snapshot
    for a project. If a snapshot image is not found for a plan configured
    to use snapshots, a warning is given.

    :param plan: the Plan to freeze into this jobplan.
    :param job: the Job this jobplan belongs to.
    :param snapshot_id: optional id of the snapshot to resolve an image from.
    :return: a new (unpersisted) JobPlan instance.
    """
    # Local imports, presumably to avoid a circular import at module load.
    from changes.models import ItemOption
    from changes.models import SnapshotImage

    plan_steps = sorted(plan.steps, key=lambda x: x.order)

    option_item_ids = [s.id for s in plan_steps]
    option_item_ids.append(plan.id)

    # Fetch options for every step plus the plan itself in a single query.
    options = defaultdict(dict)
    options_query = db.session.query(
        ItemOption.item_id, ItemOption.name, ItemOption.value
    ).filter(
        ItemOption.item_id.in_(option_item_ids),
    )
    for item_id, opt_name, opt_value in options_query:
        options[item_id][opt_name] = opt_value

    # Freeze the plan's steps and options so later plan edits don't affect
    # this job.
    snapshot = {
        'steps': [
            HistoricalImmutableStep.from_step(s, options[s.id]).to_json()
            for s in plan_steps
        ],
        'options': options[plan.id],
    }

    snapshot_image_id = None
    # TODO(paulruan): Remove behavior that just having a snapshot plan means
    # snapshot use is enabled. Just `snapshot.allow` should be sufficient.
    #
    # BUGFIX: ItemOption values are strings, so the previous truthiness check
    # (`.get('snapshot.allow', False)`) treated the string '0' (disabled) as
    # enabled. Compare against '1' explicitly, matching the check used by the
    # build index endpoint.
    allow_snapshot = '1' == options[plan.id].get('snapshot.allow', '0') or plan.snapshot_plan
    if allow_snapshot and snapshot_id is not None:
        snapshot_image = SnapshotImage.get(plan, snapshot_id)
        if snapshot_image is not None:
            snapshot_image_id = snapshot_image.id
        if snapshot_image is None:
            # Best-effort: proceed without an image but leave a trace.
            logging.warning("Failed to find snapshot_image for %s's %s.",
                            plan.project.slug, plan.label)

    instance = cls(
        plan_id=plan.id,
        job_id=job.id,
        build_id=job.build_id,
        project_id=job.project_id,
        snapshot_image_id=snapshot_image_id,
        data={
            'snapshot': snapshot,
        },
    )
    return instance
def build_jobplan(cls, plan, job, snapshot_id=None):
    """Creates and returns a jobplan.

    Unless a snapshot_id is given, no snapshot will be used. This differs
    from the build index endpoint where the default is the current snapshot
    for a project. If a snapshot image is not found for a plan configured
    to use snapshots, a warning is given.
    """
    from changes.models import ItemOption
    from changes.models import SnapshotImage

    ordered_steps = sorted(plan.steps, key=lambda step: step.order)

    # One query covers options for every step plus the plan itself.
    item_ids = [step.id for step in ordered_steps] + [plan.id]
    options = defaultdict(dict)
    rows = db.session.query(
        ItemOption.item_id,
        ItemOption.name,
        ItemOption.value,
    ).filter(
        ItemOption.item_id.in_(item_ids),
    )
    for item_id, name, value in rows:
        options[item_id][name] = value

    # Freeze the current plan configuration into the jobplan data.
    snapshot = {
        'steps': [
            HistoricalImmutableStep.from_step(step, options[step.id]).to_json()
            for step in ordered_steps
        ],
        'options': options[plan.id],
    }

    # TODO(paulruan): Remove behavior that just having a snapshot plan means
    # snapshot use is enabled. Just `snapshot.allow` should be sufficient.
    allow_snapshot = '1' == options[plan.id].get('snapshot.allow', '0') or plan.snapshot_plan

    snapshot_image_id = None
    if allow_snapshot and snapshot_id is not None:
        snapshot_image = SnapshotImage.get(plan, snapshot_id)
        if snapshot_image is None:
            logging.warning("Failed to find snapshot_image for %s's %s.",
                            plan.project.slug, plan.label)
        else:
            snapshot_image_id = snapshot_image.id

    return cls(
        plan_id=plan.id,
        job_id=job.id,
        build_id=job.build_id,
        project_id=job.project_id,
        snapshot_image_id=snapshot_image_id,
        data={
            'snapshot': snapshot,
        },
    )
def test_get_snapshot_image_dependent(self):
    """SnapshotImage.get follows plan_1's snapshot_plan_id to plan_2's image."""
    project = self.create_project()
    plan_1 = self.create_plan(project)
    plan_2 = self.create_plan(project)
    plan_1.snapshot_plan_id = plan_2.id
    snapshot = self.create_snapshot(project)
    option = ProjectOption(
        project_id=project.id,
        name='snapshot.current',
        value=snapshot.id.hex,
    )
    db.session.add(option)
    image_for_plan_1 = self.create_snapshot_image(snapshot, plan_1)
    image_for_plan_2 = self.create_snapshot_image(snapshot, plan_2)

    # Both plans resolve to plan_2's image because plan_1 delegates to it.
    for plan in (plan_1, plan_2):
        assert image_for_plan_2 == SnapshotImage.get(plan, snapshot.id)
def test_get_snapshot_image_given_snapshot(self):
    """Each snapshot id maps to its own image, independent of the
    project's configured current snapshot."""
    project = self.create_project()
    plan = self.create_plan(project)
    snapshot = self.create_snapshot(project)
    current_snapshot = self.create_snapshot(project)
    db.session.add(ProjectOption(
        project_id=project.id,
        name='snapshot.current',
        value=current_snapshot.id.hex,
    ))
    images = [
        self.create_snapshot_image(s, plan)
        for s in (snapshot, current_snapshot)
    ]

    assert images[0] == SnapshotImage.get(plan, snapshot.id)
    assert images[1] == SnapshotImage.get(plan, current_snapshot.id)
def create_snapshot_image(self, snapshot, plan, **kwargs):
    """Persist and return a SnapshotImage bound to *snapshot* and *plan*.

    Any extra keyword arguments are forwarded to the SnapshotImage
    constructor. The row is committed before returning.
    """
    snapshot_image = SnapshotImage(snapshot=snapshot, plan=plan, **kwargs)
    db.session.add(snapshot_image)
    db.session.commit()
    return snapshot_image
def post(self):
    """
    Create a new commit or diff build. The API roughly goes like this:

    1. Identify the project(s) to build for. This can be done by specifying
    ``project``, ``repository``, or ``repository[callsign]``. If a repository is
    specified somehow, then all projects for that repository are considered for
    building.

    2. Using the ``sha``, find the appropriate revision object. This may
    involve updating the repo.

    3. If ``patch`` is given, then apply the patch and mark this as a diff
    build. Otherwise, this is a commit build.

    4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
    projects.

    5. If provided, apply project_whitelist, filtering out projects not in
    this whitelist.

    6. Based on the flag ``apply_project_files_trigger`` (see comment on the
    argument itself for default values), decide whether or not to filter out
    projects by file blacklist and whitelist.

    7. Attach metadata and create/ensure existence of a build for each project,
    depending on the flag ``ensure_only``.

    NOTE: In ensure-only mode, the collection_ids of the returned builds are
    not necessarily identical, as we give new builds new collection IDs and
    preserve the existing builds' collection IDs.

    NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
    base revision to apply the patch.

    Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
    used to apply the patch. See ``find_green_parent_sha`` for the logic of
    identifying the correct revision.
    """
    args = self.parser.parse_args()

    # NOTE(review): failures report through self.handle_failure with
    # diff=args.target — presumably so diff builds get notified; confirm.
    if args.patch_file and args.ensure_only:
        return self.handle_failure(
            "Ensure-only mode does not work with a diff build yet.",
            problems=["patch", "ensure_only"],
            diff=args.target,
        )

    if not (args.project or args.repository or args["repository[phabricator.callsign]"]):
        return self.handle_failure(
            "Project or repository must be specified",
            problems=["project", "repository", "repository[phabricator.callsign]"],
            diff=args.target,
        )

    # read arguments
    if args.patch_data:
        try:
            patch_data = json.loads(args.patch_data)
        except Exception:
            return self.handle_failure(
                "Invalid patch data (must be JSON dict)",
                problems=["patch[data]"],
                diff=args.target
            )

        if not isinstance(patch_data, dict):
            return self.handle_failure(
                "Invalid patch data (must be JSON dict)",
                problems=["patch[data]"],
                diff=args.target
            )
    else:
        patch_data = None

    # 1. identify project(s)
    projects, repository = try_get_projects_and_repository(args)

    if not projects:
        return self.handle_failure("Unable to find project(s).", diff=args.target)

    # read arguments
    label = args.label
    author = args.author
    message = args.message
    tag = args.tag
    snapshot_id = args.snapshot_id
    no_snapshot = args.no_snapshot
    if no_snapshot and snapshot_id:
        return self.handle_failure("Cannot specify snapshot with no_snapshot option")
    if not tag and args.patch_file:
        tag = "patch"

    # 2. validate snapshot
    if snapshot_id:
        snapshot = Snapshot.query.get(snapshot_id)
        if not snapshot:
            return self.handle_failure("Unable to find snapshot.", diff=args.target)
        if snapshot.status != SnapshotStatus.active:
            return self.handle_failure("Snapshot is in an invalid state: %s" % snapshot.status, diff=args.target)
        for project in projects:
            plans = get_build_plans(project)
            for plan in plans:
                plan_options = plan.get_item_options()
                # Option values are strings: '1' means snapshots allowed.
                allow_snapshot = "1" == plan_options.get("snapshot.allow", "0") or plan.snapshot_plan
                if allow_snapshot and not SnapshotImage.get(plan, snapshot_id):
                    # We want to create a build using a specific snapshot but no image
                    # was found for this plan so fail.
                    return self.handle_failure(
                        "Snapshot cannot be applied to %s's %s" % (project.slug, plan.label),
                        diff=args.target
                    )

    # 3. find revision
    try:
        revision = identify_revision(repository, args.sha)
    except MissingRevision:
        # if the default fails, we absolutely can't continue and the
        # client should send a valid revision
        return self.handle_failure(
            "Unable to find commit %s in %s." % (args.sha, repository.url),
            problems=["sha", "repository"],
            diff=args.target,
        )

    # get default values for arguments
    if revision:
        if not author:
            author = revision.author
        if not label:
            label = revision.subject
        # only default the message if its absolutely not set
        if message is None:
            message = revision.message
        sha = revision.sha
    else:
        sha = args.sha

    if not args.target:
        target = sha[:12]
    else:
        target = args.target[:128]

    if not label:
        if message:
            label = message.splitlines()[0]
        if not label:
            label = "A homeless build"
    label = label[:128]

    # 4. Check for patch
    # Patch contents are buffered fully into memory before being stored.
    if args.patch_file:
        fp = StringIO()
        for line in args.patch_file:
            fp.write(line)
        patch_file = fp
    else:
        patch_file = None

    if patch_file:
        patch = Patch(repository=repository, parent_revision_sha=sha, diff=patch_file.getvalue())
        db.session.add(patch)
    else:
        patch = None

    project_options = ProjectOptionsHelper.get_options(projects, ["build.file-whitelist"])

    # mark as commit or diff build
    if not patch:
        is_commit_build = True
    else:
        is_commit_build = False

    # Back-compat: fall back to the older apply_file_whitelist flag, then to
    # a default based on build type (diff builds trigger by files; commit
    # builds do not).
    apply_project_files_trigger = args.apply_project_files_trigger
    if apply_project_files_trigger is None:
        apply_project_files_trigger = args.apply_file_whitelist
    if apply_project_files_trigger is None:
        if is_commit_build:
            apply_project_files_trigger = False
        else:
            apply_project_files_trigger = True

    if apply_project_files_trigger:
        if patch:
            diff_parser = DiffParser(patch.diff)
            files_changed = diff_parser.get_changed_files()
        elif revision:
            try:
                files_changed = _get_revision_changed_files(repository, revision)
            except MissingRevision:
                return self.handle_failure(
                    "Unable to find commit %s in %s." % (args.sha, repository.url),
                    problems=["sha", "repository"],
                    diff=args.target,
                )
        else:
            # the only way that revision can be null is if this repo does not have a vcs backend
            logging.warning(
                "Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.",
                sha,
                repository.url,
            )
            files_changed = None
    else:
        # we won't be applying file whitelist, so there is no need to get the list of changed files.
        files_changed = None

    collection_id = uuid.uuid4()
    builds = []
    for project in projects:
        plan_list = get_build_plans(project)
        if not plan_list:
            logging.warning("No plans defined for project %s", project.slug)
            continue
        # 5. apply project whitelist as appropriate
        if args.project_whitelist is not None and project.slug not in args.project_whitelist:
            logging.info("Project %s is not in the supplied whitelist", project.slug)
            continue
        forced_sha = sha
        # TODO(dcramer): find_green_parent_sha needs to take branch
        # into account
        # if patch_file:
        #     forced_sha = find_green_parent_sha(
        #         project=project,
        #         sha=sha,
        #     )

        # 6. apply file whitelist as appropriate
        diff = None
        if patch is not None:
            diff = patch.diff
        try:
            if (
                apply_project_files_trigger
                and files_changed is not None
                and not files_changed_should_trigger_project(
                    files_changed, project, project_options[project.id], sha, diff
                )
            ):
                logging.info("Changed files do not trigger build for project %s", project.slug)
                continue
        except InvalidDiffError:
            # ok, the build will fail and the user will be notified.
            pass
        except ProjectConfigError:
            author_name = "(Unknown)"
            if author:
                author_name = author.name
            # A bad project config still creates the build (fall through).
            logging.error(
                "Project config for project %s is not in a valid format. Author is %s.",
                project.slug,
                author_name,
                exc_info=True,
            )

        # 7. create/ensure build
        if args.ensure_only:
            potentials = list(
                Build.query.filter(Build.project_id == project.id, Build.source.has(revision_sha=sha, patch=patch))
                .order_by(Build.date_created.desc())  # newest first
                .limit(1)
            )
            if len(potentials) == 0:
                builds.append(
                    create_build(
                        project=project,
                        collection_id=collection_id,
                        sha=forced_sha,
                        target=target,
                        label=label,
                        message=message,
                        author=author,
                        patch=patch,
                        source_data=patch_data,
                        tag=tag,
                        snapshot_id=snapshot_id,
                        no_snapshot=no_snapshot,
                    )
                )
            else:
                builds.append(potentials[0])
        else:
            builds.append(
                create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                    snapshot_id=snapshot_id,
                    no_snapshot=no_snapshot,
                )
            )

    return self.respond(builds)
def post(self):
    """
    Create a new commit or diff build. The API roughly goes like this:

    1. Identify the project(s) to build for. This can be done by specifying
    ``project``, ``repository``, or ``repository[callsign]``. If a repository is
    specified somehow, then all projects for that repository are considered for
    building.

    2. Using the ``sha``, find the appropriate revision object. This may
    involve updating the repo.

    3. If ``patch`` is given, then apply the patch and mark this as a diff
    build. Otherwise, this is a commit build.

    4. If ``snapshot_id`` is given, verify that the snapshot can be used by all
    projects.

    5. If provided, apply project_whitelist, filtering out projects not in
    this whitelist.

    6. Based on the flag ``apply_project_files_trigger`` (see comment on the
    argument itself for default values), decide whether or not to filter out
    projects by file blacklist and whitelist.

    7. Attach metadata and create/ensure existence of a build for each project,
    depending on the flag ``ensure_only``.

    NOTE: In ensure-only mode, the collection_ids of the returned builds are
    not necessarily identical, as we give new builds new collection IDs and
    preserve the existing builds' collection IDs.

    NOTE: If ``patch`` is specified ``sha`` is assumed to be the original
    base revision to apply the patch.

    Not relevant until we fix TODO: ``sha`` is **not** guaranteed to be the rev
    used to apply the patch. See ``find_green_parent_sha`` for the logic of
    identifying the correct revision.
    """
    args = self.parser.parse_args()

    if args.patch_file and args.ensure_only:
        return error("Ensure-only mode does not work with a diff build yet.",
                     problems=["patch", "ensure_only"])

    if not (args.project or args.repository or args['repository[phabricator.callsign]']):
        return error("Project or repository must be specified",
                     problems=["project", "repository",
                               "repository[phabricator.callsign]"])

    # read arguments
    if args.patch_data:
        try:
            patch_data = json.loads(args.patch_data)
        except Exception:
            return error("Invalid patch data (must be JSON dict)",
                         problems=["patch[data]"])

        if not isinstance(patch_data, dict):
            return error("Invalid patch data (must be JSON dict)",
                         problems=["patch[data]"])
    else:
        patch_data = None

    # 1. identify project(s)
    projects, repository = try_get_projects_and_repository(args)

    if not projects:
        return error("Unable to find project(s).")

    # read arguments
    label = args.label
    author = args.author
    message = args.message
    tag = args.tag
    snapshot_id = args.snapshot_id
    no_snapshot = args.no_snapshot
    if no_snapshot and snapshot_id:
        return error("Cannot specify snapshot with no_snapshot option")
    if not tag and args.patch_file:
        tag = 'patch'

    # 2. validate snapshot
    if snapshot_id:
        snapshot = Snapshot.query.get(snapshot_id)
        if not snapshot:
            return error("Unable to find snapshot.")
        if snapshot.status != SnapshotStatus.active:
            return error("Snapshot is in an invalid state: %s" % snapshot.status)
        for project in projects:
            plans = get_build_plans(project)
            for plan in plans:
                plan_options = plan.get_item_options()
                # Option values are strings: '1' means snapshots allowed.
                allow_snapshot = '1' == plan_options.get('snapshot.allow', '0') or plan.snapshot_plan
                if allow_snapshot and not SnapshotImage.get(plan, snapshot_id):
                    # We want to create a build using a specific snapshot but no image
                    # was found for this plan so fail.
                    return error("Snapshot cannot be applied to %s's %s" % (project.slug, plan.label))

    # 3. find revision
    try:
        revision = identify_revision(repository, args.sha)
    except MissingRevision:
        # if the default fails, we absolutely can't continue and the
        # client should send a valid revision
        return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                     problems=['sha', 'repository'])

    # get default values for arguments
    if revision:
        if not author:
            author = revision.author
        if not label:
            label = revision.subject
        # only default the message if its absolutely not set
        if message is None:
            message = revision.message
        sha = revision.sha
    else:
        sha = args.sha

    if not args.target:
        target = sha[:12]
    else:
        target = args.target[:128]

    if not label:
        if message:
            label = message.splitlines()[0]
        if not label:
            label = 'A homeless build'
    label = label[:128]

    # 4. Check for patch
    # Patch contents are buffered fully into memory before being stored.
    if args.patch_file:
        fp = StringIO()
        for line in args.patch_file:
            fp.write(line)
        patch_file = fp
    else:
        patch_file = None

    if patch_file:
        patch = Patch(
            repository=repository,
            parent_revision_sha=sha,
            diff=patch_file.getvalue(),
        )
        db.session.add(patch)
    else:
        patch = None

    project_options = ProjectOptionsHelper.get_options(
        projects, ['build.file-whitelist'])

    # mark as commit or diff build
    if not patch:
        is_commit_build = True
    else:
        is_commit_build = False

    # Back-compat: fall back to the older apply_file_whitelist flag, then to
    # a default based on build type (diff builds trigger by files; commit
    # builds do not).
    apply_project_files_trigger = args.apply_project_files_trigger
    if apply_project_files_trigger is None:
        apply_project_files_trigger = args.apply_file_whitelist
    if apply_project_files_trigger is None:
        if is_commit_build:
            apply_project_files_trigger = False
        else:
            apply_project_files_trigger = True

    if apply_project_files_trigger:
        if patch:
            diff_parser = DiffParser(patch.diff)
            files_changed = diff_parser.get_changed_files()
        elif revision:
            try:
                files_changed = _get_revision_changed_files(repository, revision)
            except MissingRevision:
                return error("Unable to find commit %s in %s." % (args.sha, repository.url),
                             problems=['sha', 'repository'])
        else:
            # the only way that revision can be null is if this repo does not have a vcs backend
            logging.warning(
                'Revision and patch are both None for sha %s. This is because the repo %s does not have a VCS backend.',
                sha, repository.url)
            files_changed = None
    else:
        # we won't be applying file whitelist, so there is no need to get the list of changed files.
        files_changed = None

    collection_id = uuid.uuid4()
    builds = []
    for project in projects:
        plan_list = get_build_plans(project)
        if not plan_list:
            logging.warning('No plans defined for project %s', project.slug)
            continue
        # 5. apply project whitelist as appropriate
        if args.project_whitelist is not None and project.slug not in args.project_whitelist:
            logging.info('Project %s is not in the supplied whitelist', project.slug)
            continue
        forced_sha = sha
        # TODO(dcramer): find_green_parent_sha needs to take branch
        # into account
        # if patch_file:
        #     forced_sha = find_green_parent_sha(
        #         project=project,
        #         sha=sha,
        #     )

        # 6. apply file whitelist as appropriate
        diff = None
        if patch is not None:
            diff = patch.diff
        try:
            if (apply_project_files_trigger and
                    files_changed is not None and
                    not files_changed_should_trigger_project(
                        files_changed, project, project_options[project.id], sha, diff)):
                logging.info('Changed files do not trigger build for project %s', project.slug)
                continue
        except InvalidDiffError:
            # ok, the build will fail and the user will be notified.
            pass
        except ProjectConfigError:
            author_name = '(Unknown)'
            if author:
                author_name = author.name
            # A bad project config still creates the build (fall through).
            logging.error(
                'Project config for project %s is not in a valid format. Author is %s.',
                project.slug, author_name, exc_info=True)

        # 7. create/ensure build
        if args.ensure_only:
            potentials = list(Build.query.filter(
                Build.project_id == project.id,
                Build.source.has(revision_sha=sha, patch=patch),
            ).order_by(
                Build.date_created.desc()  # newest first
            ).limit(1))
            if len(potentials) == 0:
                builds.append(create_build(
                    project=project,
                    collection_id=collection_id,
                    sha=forced_sha,
                    target=target,
                    label=label,
                    message=message,
                    author=author,
                    patch=patch,
                    source_data=patch_data,
                    tag=tag,
                    snapshot_id=snapshot_id,
                    no_snapshot=no_snapshot))
            else:
                builds.append(potentials[0])
        else:
            builds.append(create_build(
                project=project,
                collection_id=collection_id,
                sha=forced_sha,
                target=target,
                label=label,
                message=message,
                author=author,
                patch=patch,
                source_data=patch_data,
                tag=tag,
                snapshot_id=snapshot_id,
                no_snapshot=no_snapshot))

    return self.respond(builds)
def post(self, project_id):
    """Initiates a new snapshot for this project.

    Creates a 'Create Snapshot' build with one job per snapshottable plan,
    plus a pending Snapshot row and one SnapshotImage per plan, then kicks
    off the jobs and build sync asynchronously.

    Returns 404 if the project is unknown, 400 if the revision cannot be
    resolved or the project has no snapshottable plans; otherwise responds
    with the new snapshot.
    """
    project = Project.get(project_id)
    if not project:
        return '', 404

    args = self.post_parser.parse_args()

    repository = project.repository
    try:
        revision = identify_revision(repository, args.sha)
    except MissingRevision:
        # if the default fails, we absolutely can't continue and the
        # client should send a valid revision
        return '{"error": "Unable to find a matching revision."}', 400

    if revision:
        sha = revision.sha
    else:
        sha = args.sha

    plan_list = get_snapshottable_plans(project)

    if not plan_list:
        return '{"error": "No snapshottable plans associated with project."}', 400

    source, _ = get_or_create(Source, where={
        'repository': repository,
        'revision_sha': sha,
        'patch_id': None,
    })

    build = Build(
        source_id=source.id,
        source=source,
        project_id=project.id,
        project=project,
        label='Create Snapshot',
        status=Status.queued,
        cause=Cause.snapshot,
        target=sha[:12],
    )
    db.session.add(build)

    # TODO(dcramer): this needs to update with the build result
    snapshot = Snapshot(
        project_id=project.id,
        source_id=source.id,
        build_id=build.id,
        status=SnapshotStatus.pending,
    )
    db.session.add(snapshot)

    jobs = []
    for plan in plan_list:
        job = Job(
            build=build,
            build_id=build.id,
            project=project,
            project_id=project.id,
            source=build.source,
            source_id=build.source_id,
            status=build.status,
            label='Create Snapshot: %s' % (plan.label, ),
        )
        db.session.add(job)

        # NOTE(review): build_jobplan is called without a snapshot_id, so
        # these jobs run without a snapshot — presumably intentional since
        # they are the ones producing the images; confirm.
        jobplan = JobPlan.build_jobplan(plan, job)
        db.session.add(jobplan)

        image = SnapshotImage(
            job=job,
            snapshot=snapshot,
            plan=plan,
        )
        db.session.add(image)

        jobs.append(job)

    # Commit all rows before scheduling async work so the tasks can see them.
    db.session.commit()

    for job in jobs:
        create_job.delay(
            job_id=job.id.hex,
            task_id=job.id.hex,
            parent_task_id=job.build_id.hex,
        )

    db.session.commit()

    sync_build.delay(
        build_id=build.id.hex,
        task_id=build.id.hex,
    )

    return self.respond(snapshot)