def generic_build(request, project_id_or_slug=None):
    try:
        project = Project.objects.get(pk=project_id_or_slug)  # Allow slugs too
    except (Project.DoesNotExist, ValueError):
        try:
            project = Project.objects.get(slug=project_id_or_slug)
        except (Project.DoesNotExist, ValueError):
            pc_log.error(
                "(Incoming Generic Build) Repo not found: %s" % (
                    project_id_or_slug))
            return HttpResponseNotFound(
                'Repo not found: %s' % project_id_or_slug)
    if request.method == 'POST':
        slug = request.POST.get('version_slug', None)
        if slug:
            pc_log.info(
                "(Incoming Generic Build) %s [%s]" % (project.slug, slug))
            _build_version(project, slug)
        else:
            pc_log.info(
                "(Incoming Generic Build) %s [%s]" % (project.slug, LATEST))
            trigger_build(project=project, force=True)
    else:
        return HttpResponse("You must POST to this resource.")
    return redirect('builds_project_list', project.slug)
def test_trigger_build_when_version_not_provided_default_version_exist(self, update_docs_task):
    self.assertFalse(Version.objects.filter(slug='test-default-version').exists())

    project_1 = get(Project)
    version_1 = get(Version, project=project_1, slug='test-default-version', active=True)

    project_1.default_version = 'test-default-version'
    project_1.save()

    default_version = project_1.get_default_version()
    self.assertEqual(default_version, 'test-default-version')

    trigger_build(project=project_1)

    kwargs = {
        'version_pk': version_1.pk,
        'record': True,
        'force': False,
        'build_pk': mock.ANY,
    }
    update_docs_task.signature.assert_has_calls([
        mock.call(
            args=(project_1.pk,),
            kwargs=kwargs,
            options=mock.ANY,
            immutable=True,
        ),
    ])
def handle(self, *args, **options):
    record = options['record']
    force = options['force']
    version = options['version']
    if len(args):
        for slug in args:
            if version and version != "all":
                log.info("Updating version %s for %s" % (version, slug))
                for version in Version.objects.filter(project__slug=slug,
                                                      slug=version):
                    trigger_build(project=version.project, version=version)
            elif version == "all":
                log.info("Updating all versions for %s" % slug)
                for version in Version.objects.filter(project__slug=slug,
                                                      active=True,
                                                      uploaded=False):
                    tasks.update_docs(pk=version.project_id, record=False,
                                      version_pk=version.pk)
            else:
                p = Project.all_objects.get(slug=slug)
                log.info("Building %s" % p)
                trigger_build(project=p, force=force)
    else:
        if version == "all":
            log.info("Updating all versions")
            for version in Version.objects.filter(active=True, uploaded=False):
                tasks.update_docs(pk=version.project_id, record=record,
                                  force=force, version_pk=version.pk)
        else:
            log.info("Updating all docs")
            tasks.update_docs_pull(record=record, force=force)
def get(self, request, *args, **kwargs):
    """Process link request as a form post to the project import form."""
    self.request = request
    self.args = args
    self.kwargs = kwargs

    data = self.get_form_data()
    project = (Project.objects.for_admin_user(request.user)
               .filter(repo=data['repo']).first())
    if project is not None:
        messages.success(
            request, _('The demo project is already imported!'))
    else:
        kwargs = self.get_form_kwargs()
        form = self.form_class(data=data, **kwargs)
        if form.is_valid():
            project = form.save()
            project.save()
            trigger_build(project, basic=True)
            messages.success(
                request, _('Your demo project is currently being imported'))
        else:
            for (__, msg) in form.errors.items():
                log.error(msg)
            messages.error(
                request,
                _('There was a problem adding the demo project'),
            )
            return HttpResponseRedirect(reverse('projects_dashboard'))
    return HttpResponseRedirect(reverse('projects_detail', args=[project.slug]))
def done(self, form_list, **kwargs):
    """
    Save form data as object instance.

    Don't save form data directly, instead bypass documentation building and
    other side effects for now, by signalling a save without commit. Then,
    finish by adding the members to the project and saving.
    """
    form_data = self.get_all_cleaned_data()
    extra_fields = ProjectExtraForm.Meta.fields
    # expect the first form
    basics_form = form_list[0]
    # Save the basics form to create the project instance, then alter
    # attributes directly from other forms
    project = basics_form.save()

    tags = form_data.pop('tags', [])
    for tag in tags:
        project.tags.add(tag)

    for field, value in form_data.items():
        if field in extra_fields:
            setattr(project, field, value)
    basic_only = True
    project.save()
    project_import.send(sender=project, request=self.request)
    trigger_build(project, basic=basic_only)
    return HttpResponseRedirect(reverse('projects_detail', args=[project.slug]))
def handle(self, *args, **options):
    force = options['force']
    version = options['version']
    if options.get('slugs', []):
        for slug in options['slugs']:
            if version and version != 'all':
                log.info('Updating version %s for %s', version, slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        slug=version,
                ):
                    trigger_build(project=version.project, version=version)
            elif version == 'all':
                log.info('Updating all versions for %s', slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    build = Build.objects.create(
                        project=version.project,
                        version=version,
                        type='html',
                        state='triggered',
                    )
                    # pylint: disable=no-value-for-parameter
                    tasks.update_docs_task(
                        version.project_id,
                        build_pk=build.pk,
                        version_pk=version.pk,
                    )
            else:
                p = Project.all_objects.get(slug=slug)
                log.info('Building %s', p)
                trigger_build(project=p, force=force)
    else:
        if version == 'all':
            log.info('Updating all versions')
            for version in Version.objects.filter(
                    active=True,
                    uploaded=False,
            ):
                # pylint: disable=no-value-for-parameter
                tasks.update_docs_task(
                    version.project_id,
                    force=force,
                    version_pk=version.pk,
                )
        else:
            log.info('Updating all docs')
            for project in Project.objects.all():
                # pylint: disable=no-value-for-parameter
                tasks.update_docs_task(
                    project.pk,
                    force=force,
                )
def sync_versions(self, request, **kwargs):  # noqa: D205
    """
    Sync the version data in the repo (on the build server) with what we
    have in the database.

    Returns the identifiers for the versions that have been deleted.
    """
    project = get_object_or_404(
        Project.objects.api(request.user), pk=kwargs['pk'])

    # If the currently highest non-prerelease version is active, then make
    # the new latest version active as well.
    old_highest_version = determine_stable_version(project.versions.all())
    if old_highest_version is not None:
        activate_new_stable = old_highest_version.active
    else:
        activate_new_stable = False

    try:
        # Update All Versions
        data = request.data
        added_versions = set()
        if 'tags' in data:
            ret_set = api_utils.sync_versions(
                project=project, versions=data['tags'], type=TAG)
            added_versions.update(ret_set)
        if 'branches' in data:
            ret_set = api_utils.sync_versions(
                project=project, versions=data['branches'], type=BRANCH)
            added_versions.update(ret_set)
        deleted_versions = api_utils.delete_versions(project, data)
    except Exception as e:
        log.exception("Sync Versions Error: %s", e.message)
        return Response({'error': e.message},
                        status=status.HTTP_400_BAD_REQUEST)

    promoted_version = project.update_stable_version()
    if promoted_version:
        new_stable = project.get_stable_version()
        log.info(
            "Triggering new stable build: {project}:{version}".format(
                project=project.slug,
                version=new_stable.identifier))
        trigger_build(project=project, version=new_stable)

        # Marking the tag that is considered the new stable version as
        # active and building it if it was just added.
        if (activate_new_stable and
                promoted_version.slug in added_versions):
            promoted_version.active = True
            promoted_version.save()
            trigger_build(project=project, version=promoted_version)

    return Response({
        'added_versions': added_versions,
        'deleted_versions': deleted_versions,
    })
def handle(self, *args, **options):
    record = options['record']
    force = options['force']
    version = options['version']
    if args:
        for slug in args:
            if version and version != "all":
                log.info("Updating version %s for %s", version, slug)
                for version in Version.objects.filter(project__slug=slug,
                                                      slug=version):
                    trigger_build(project=version.project, version=version)
            elif version == "all":
                log.info("Updating all versions for %s", slug)
                for version in Version.objects.filter(project__slug=slug,
                                                      active=True,
                                                      uploaded=False):
                    build_pk = None
                    if record:
                        build = Build.objects.create(
                            project=version.project,
                            version=version,
                            type='html',
                            state='triggered',
                        )
                        build_pk = build.pk
                    tasks.UpdateDocsTask().run(
                        pk=version.project_id,
                        build_pk=build_pk,
                        record=record,
                        version_pk=version.pk
                    )
            else:
                p = Project.all_objects.get(slug=slug)
                log.info("Building %s", p)
                trigger_build(project=p, force=force, record=record)
    else:
        if version == "all":
            log.info("Updating all versions")
            for version in Version.objects.filter(active=True, uploaded=False):
                tasks.UpdateDocsTask().run(
                    pk=version.project_id,
                    record=record,
                    force=force,
                    version_pk=version.pk
                )
        else:
            log.info("Updating all docs")
            for project in Project.objects.all():
                tasks.UpdateDocsTask().run(
                    pk=project.pk,
                    record=record,
                    force=force
                )
def build_default_version(self, request, queryset):
    """Trigger a build for the default version of each selected project."""
    total = 0
    for project in queryset:
        trigger_build(project=project)
        total += 1
    messages.add_message(
        request,
        messages.INFO,
        'Triggered builds for {} project(s).'.format(total),
    )
def post(self, request, project_slug):
    project = get_object_or_404(
        Project.objects.for_admin_user(self.request.user),
        slug=project_slug
    )
    version_slug = request.POST.get('version_slug')
    version = get_object_or_404(
        Version,
        project=project,
        slug=version_slug,
    )

    trigger_build(project=project, version=version)
    return HttpResponseRedirect(
        reverse('builds_project_list', args=[project.slug]))
def save_version(self, version):
    new_value = self.cleaned_data.get('version-%s' % version.slug, None)
    privacy_level = self.cleaned_data.get('privacy-%s' % version.slug, None)
    if ((new_value is None or new_value == version.active) and
            (privacy_level is None or privacy_level == version.privacy_level)):
        return
    version.active = new_value
    version.privacy_level = privacy_level
    version.save()
    if version.active and not version.built and not version.uploaded:
        trigger_build(project=self.project, version=version)
def build_version(self, request, queryset):
    """Trigger a build for the project version."""
    total = 0
    for version in queryset:
        trigger_build(
            project=version.project,
            version=version,
        )
        total += 1
    messages.add_message(
        request,
        messages.INFO,
        'Triggered builds for {} version(s).'.format(total),
    )
def post(self, request, project_slug):
    project = get_object_or_404(Project, slug=project_slug)

    if not AdminPermission.is_admin(request.user, project):
        return HttpResponseForbidden()

    version_slug = request.POST.get('version_slug')
    version = get_object_or_404(
        Version,
        project=project,
        slug=version_slug,
    )

    trigger_build(project=project, version=version)
    return HttpResponseRedirect(
        reverse('builds_project_list', args=[project.slug]))
def test_trigger_build_time_limit(self, update_docs):
    """Pass the time limit through to the task."""
    trigger_build(project=self.project, version=self.version)
    update_docs().si.assert_has_calls([
        mock.call(
            self.project.pk,
            time_limit=720,
            soft_time_limit=600,
            queue=mock.ANY,
            force=False,
            record=True,
            build_pk=mock.ANY,
            version_pk=self.version.id,
        ),
    ])
    update_docs().si().apply_async.assert_called()
def post(self, request, project_slug):
    project = get_object_or_404(Project, slug=project_slug)

    if not AdminPermission.is_admin(request.user, project):
        return HttpResponseForbidden()

    version_slug = request.POST.get('version_slug')
    version = get_object_or_404(
        Version,
        project=project,
        slug=version_slug,
    )

    update_docs_task, build = trigger_build(
        project=project,
        version=version,
    )
    if (update_docs_task, build) == (None, None):
        # Build was skipped
        messages.add_message(
            request,
            messages.WARNING,
            "This project is currently disabled and can't trigger new builds.",
        )
        return HttpResponseRedirect(
            reverse('builds_project_list', args=[project.slug]),
        )

    return HttpResponseRedirect(
        reverse('builds_detail', args=[project.slug, build.pk]),
    )
def test_trigger_build_rounded_time_limit(self, update_docs):
    """Time limit should round down"""
    self.project.container_time_limit = 3
    trigger_build(project=self.project, version=self.version)
    update_docs().si.assert_has_calls([
        mock.call(
            self.project.pk,
            time_limit=3,
            soft_time_limit=3,
            queue=mock.ANY,
            force=False,
            record=True,
            build_pk=mock.ANY,
            version_pk=self.version.id,
        ),
    ])
    update_docs().si().apply_async.assert_called()
def save_version(self, version):
    """Save version if there has been a change, trigger a rebuild."""
    new_value = self.cleaned_data.get(
        'version-{}'.format(version.slug),
        None,
    )
    privacy_level = self.cleaned_data.get(
        'privacy-{}'.format(version.slug),
        None,
    )
    if ((new_value is None or new_value == version.active) and
            (privacy_level is None or
             privacy_level == version.privacy_level)):  # yapf: disable  # noqa
        return
    version.active = new_value
    version.privacy_level = privacy_level
    version.save()
    if version.active and not version.built and not version.uploaded:
        trigger_build(project=self.project, version=version)
def test_trigger_build_time_limit(self, update_docs):
    """Pass the time limit through to the task."""
    trigger_build(project=self.project, version=self.version)
    update_docs.assert_has_calls([
        mock.call.apply_async(
            time_limit=720,
            soft_time_limit=600,
            queue=mock.ANY,
            kwargs={
                'pk': self.project.id,
                'force': False,
                'basic': False,
                'record': True,
                'build_pk': mock.ANY,
                'version_pk': self.version.id
            }
        )
    ])
def test_trigger_skipped_project(self, update_docs_task):
    self.project.skip = True
    self.project.save()
    result = trigger_build(
        project=self.project,
        version=self.version,
    )
    self.assertEqual(result, (None, None))
    self.assertFalse(update_docs_task.signature.called)
    self.assertFalse(update_docs_task.signature().apply_async.called)
def test_trigger_build_invalid_time_limit(self, update_docs):
    """Time limit as string"""
    self.project.container_time_limit = '200s'
    trigger_build(project=self.project, version=self.version)
    update_docs().apply_async.assert_has_calls([
        mock.call(
            time_limit=720,
            soft_time_limit=600,
            queue=mock.ANY,
            kwargs={
                'pk': self.project.id,
                'force': False,
                'basic': False,
                'record': True,
                'build_pk': mock.ANY,
                'version_pk': self.version.id
            }
        )
    ])
def test_trigger_build_rounded_time_limit(self, update_docs):
    """Time limit should round down"""
    self.project.container_time_limit = 3
    trigger_build(project=self.project, version=self.version)
    update_docs.assert_has_calls([
        mock.call.apply_async(
            time_limit=3,
            soft_time_limit=3,
            queue=mock.ANY,
            kwargs={
                'pk': self.project.id,
                'force': False,
                'basic': False,
                'record': True,
                'build_pk': mock.ANY,
                'version_pk': self.version.id
            }
        )
    ])
def done(self, form_list, **kwargs):
    """
    Save form data as object instance.

    Don't save form data directly, instead bypass documentation building and
    other side effects for now, by signalling a save without commit. Then,
    finish by adding the members to the project and saving.
    """
    # expect the first form
    basics_form = form_list[0]
    # Save the basics form to create the project instance, then alter
    # attributes directly from other forms
    project = basics_form.save()
    for form in form_list[1:]:
        for (field, value) in form.cleaned_data.items():
            setattr(project, field, value)
    basic_only = True
    project.save()
    project_import.send(sender=project, request=self.request)
    trigger_build(project, basic=basic_only)
    return HttpResponseRedirect(reverse("projects_detail", args=[project.slug]))
def _build_version(project, slug, already_built=()):
    default = project.default_branch or (project.vcs_repo().fallback_branch)
    if slug == default and slug not in already_built:
        # short circuit versions that are default
        # these will build at "latest", and thus won't be
        # active
        latest_version = project.versions.get(slug=LATEST)
        trigger_build(project=project, version=latest_version, force=True)
        pc_log.info(("(Version build) Building %s:%s"
                     % (project.slug, latest_version.slug)))
        if project.versions.exclude(active=False).filter(slug=slug).exists():
            # Handle the case where we want to build the custom branch too
            slug_version = project.versions.get(slug=slug)
            trigger_build(project=project, version=slug_version, force=True)
            pc_log.info(("(Version build) Building %s:%s"
                         % (project.slug, slug_version.slug)))
        return LATEST
    elif project.versions.exclude(active=True).filter(slug=slug).exists():
        pc_log.info(("(Version build) Not Building %s" % slug))
        return None
    elif slug not in already_built:
        version = project.versions.get(slug=slug)
        trigger_build(project=project, version=version, force=True)
        pc_log.info(("(Version build) Building %s:%s"
                     % (project.slug, version.slug)))
        return slug
    else:
        pc_log.info(("(Version build) Not Building %s" % slug))
        return None
def test_trigger_build_time_limit(self, update_docs):
    """Pass the time limit through to the task."""
    trigger_build(project=self.project, version=self.version)

    kwargs = {
        'version_pk': self.version.pk,
        'record': True,
        'force': False,
        'build_pk': mock.ANY,
    }
    options = {
        'queue': mock.ANY,
        'time_limit': 720,
        'soft_time_limit': 600,
    }

    update_docs.signature.assert_has_calls([
        mock.call(
            args=(self.project.pk,),
            kwargs=kwargs,
            options=options,
            immutable=True,
        ),
    ])
    update_docs.signature().apply_async.assert_called()
def test_trigger_build_when_version_not_provided_default_version_doesnt_exist(self, update_docs_task):
    trigger_build(project=self.project)
    default_version = self.project.get_default_version()
    version_ = self.project.versions.get(slug=default_version)
    self.assertEqual(version_.slug, LATEST)

    kwargs = {
        'version_pk': version_.pk,
        'record': True,
        'force': False,
        'build_pk': mock.ANY,
    }
    update_docs_task.signature.assert_has_calls([
        mock.call(
            args=(self.project.pk,),
            kwargs=kwargs,
            options=mock.ANY,
            immutable=True,
        ),
    ])
def _build_version(project, slug, already_built=()):
    """
    Where we actually trigger builds for a project and slug.

    All webhook logic should route here to call ``trigger_build``.
    """
    if not project.has_valid_webhook:
        project.has_valid_webhook = True
        project.save()
    # Previously we were building the latest version (inactive or active)
    # when building the default version,
    # some users may have relied on this to update the version list #4450
    version = project.versions.filter(active=True, slug=slug).first()
    if version and slug not in already_built:
        log.info(
            '(Version build) Building %s:%s',
            project.slug,
            version.slug,
        )
        trigger_build(project=project, version=version, force=True)
        return slug

    log.info('(Version build) Not Building %s', slug)
    return None
def test_trigger_custom_queue(self, update_docs):
    """Use a custom queue when routing the task."""
    self.project.build_queue = 'build03'
    trigger_build(project=self.project, version=self.version)

    kwargs = {
        'version_pk': self.version.pk,
        'record': True,
        'force': False,
        'build_pk': mock.ANY,
    }
    options = {
        'queue': 'build03',
        'time_limit': 720,
        'soft_time_limit': 600,
    }

    update_docs.signature.assert_has_calls([
        mock.call(
            args=(self.project.pk,),
            kwargs=kwargs,
            options=options,
            immutable=True,
        ),
    ])
    update_docs.signature().apply_async.assert_called()
def create(self, request, **kwargs):  # pylint: disable=arguments-differ
    project = self._get_parent_project()
    version = self._get_parent_version()

    _, build = trigger_build(project, version=version)

    # TODO: refactor this to be a serializer
    # BuildTriggeredSerializer(build, project, version).data
    data = {
        'build': BuildSerializer(build).data,
        'project': ProjectSerializer(project).data,
        'version': VersionSerializer(build.version).data,
    }

    if build:
        data.update({'triggered': True})
        status = 202
    else:
        data.update({'triggered': False})
        status = 400
    return Response(data=data, status=status)
def _build_version(project, slug, already_built=()):
    """
    Where we actually trigger builds for a project and slug.

    All webhook logic should route here to call ``trigger_build``.
    """
    default = project.default_branch or (project.vcs_repo().fallback_branch)
    if not project.has_valid_webhook:
        project.has_valid_webhook = True
        project.save()
    if slug == default and slug not in already_built:
        # short circuit versions that are default
        # these will build at "latest", and thus won't be
        # active
        latest_version = project.versions.get(slug=LATEST)
        trigger_build(project=project, version=latest_version, force=True)
        log.info(("(Version build) Building %s:%s"
                  % (project.slug, latest_version.slug)))
        if project.versions.exclude(active=False).filter(slug=slug).exists():
            # Handle the case where we want to build the custom branch too
            slug_version = project.versions.get(slug=slug)
            trigger_build(project=project, version=slug_version, force=True)
            log.info(("(Version build) Building %s:%s"
                      % (project.slug, slug_version.slug)))
        return LATEST
    elif project.versions.exclude(active=True).filter(slug=slug).exists():
        log.info(("(Version build) Not Building %s" % slug))
        return None
    elif slug not in already_built:
        version = project.versions.get(slug=slug)
        trigger_build(project=project, version=version, force=True)
        log.info(("(Version build) Building %s:%s"
                  % (project.slug, version.slug)))
        return slug
    else:
        log.info(("(Version build) Not Building %s" % slug))
        return None
def handle(self, *args, **options):
    force = options['force']
    version = options['version']
    if options.get('slugs', []):
        for slug in options['slugs']:
            if version and version != 'all':
                log.info('Updating version %s for %s', version, slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        slug=version,
                ):
                    trigger_build(project=version.project, version=version)
            elif version == 'all':
                log.info('Updating all versions for %s', slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    build = Build.objects.create(
                        project=version.project,
                        version=version,
                        type='html',
                        state='triggered',
                    )
                    # pylint: disable=no-value-for-parameter
                    tasks.update_docs_task(
                        version.pk,
                        build_pk=build.pk,
                    )
            elif version == INTERNAL:
                log.info('Updating all internal versions for %s', slug)
                for version in Version.internal.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    build = Build.objects.create(
                        project=version.project,
                        version=version,
                        type='html',
                        state='triggered',
                    )
                    # pylint: disable=no-value-for-parameter
                    tasks.update_docs_task(
                        version.project_id,
                        build_pk=build.pk,
                        version_pk=version.pk,
                    )
            elif version == EXTERNAL:
                log.info('Updating all external versions for %s', slug)
                for version in Version.external.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    build = Build.objects.create(
                        project=version.project,
                        version=version,
                        type='html',
                        state='triggered',
                    )
                    # pylint: disable=no-value-for-parameter
                    tasks.update_docs_task(
                        version.project_id,
                        build_pk=build.pk,
                        version_pk=version.pk,
                    )
            else:
                p = Project.all_objects.get(slug=slug)
                log.info('Building %s', p)
                trigger_build(project=p, force=force)
    else:
        if version == 'all':
            log.info('Updating all versions')
            for version in Version.objects.filter(
                    active=True,
                    uploaded=False,
            ):
                # pylint: disable=no-value-for-parameter
                tasks.update_docs_task(
                    version.pk,
                    force=force,
                )
        else:
            log.info('Updating all docs')
            for project in Project.objects.all():
                # pylint: disable=no-value-for-parameter
                default_version = project.get_default_version()
                version = project.versions.get(slug=default_version)
                tasks.update_docs_task(
                    version.pk,
                    force=force,
                )
def sync_versions(self, request, **kwargs):  # noqa: D205
    """
    Sync the version data in the repo (on the build server).

    Version data in the repo is synced with what we have in the database.

    :returns: the identifiers for the versions that have been deleted.
    """
    project = get_object_or_404(
        Project.objects.api(request.user),
        pk=kwargs['pk'],
    )

    # If the currently highest non-prerelease version is active, then make
    # the new latest version active as well.
    old_highest_version = determine_stable_version(project.versions.all())
    if old_highest_version is not None:
        activate_new_stable = old_highest_version.active
    else:
        activate_new_stable = False

    try:
        # Update All Versions
        data = request.data
        added_versions = set()
        if 'tags' in data:
            ret_set = sync_versions_to_db(
                project=project,
                versions=data['tags'],
                type=TAG,
            )
            added_versions.update(ret_set)
        if 'branches' in data:
            ret_set = sync_versions_to_db(
                project=project,
                versions=data['branches'],
                type=BRANCH,
            )
            added_versions.update(ret_set)
        deleted_versions = delete_versions_from_db(project, data)
    except Exception as e:
        log.exception('Sync Versions Error')
        return Response(
            {
                'error': str(e),
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        # The order of added_versions isn't deterministic.
        # We don't track the commit time or any other metadata.
        # We usually have one version added per webhook.
        run_automation_rules(project, added_versions)
    except Exception:
        # Don't interrupt the request if something goes wrong
        # in the automation rules.
        log.exception(
            'Failed to execute automation rules for [%s]: %s',
            project.slug,
            added_versions,
        )

    # TODO: move this to an automation rule
    promoted_version = project.update_stable_version()
    new_stable = project.get_stable_version()
    if promoted_version and new_stable and new_stable.active:
        log.info(
            'Triggering new stable build: %(project)s:%(version)s',
            {
                'project': project.slug,
                'version': new_stable.identifier,
            },
        )
        trigger_build(project=project, version=new_stable)

        # Marking the tag that is considered the new stable version as
        # active and building it if it was just added.
        if (activate_new_stable and
                promoted_version.slug in added_versions):
            promoted_version.active = True
            promoted_version.save()
            trigger_build(project=project, version=promoted_version)

    return Response({
        'added_versions': added_versions,
        'deleted_versions': deleted_versions,
    })
def sync_versions(self, request, **kwargs):  # noqa: D205
    """
    Sync the version data in the repo (on the build server).

    Version data in the repo is synced with what we have in the database.

    :returns: the identifiers for the versions that have been deleted.
    """
    project = get_object_or_404(Project.objects.api(request.user),
                                pk=kwargs['pk'])

    # If the currently highest non-prerelease version is active, then make
    # the new latest version active as well.
    old_highest_version = determine_stable_version(project.versions.all())
    if old_highest_version is not None:
        activate_new_stable = old_highest_version.active
    else:
        activate_new_stable = False

    try:
        # Update All Versions
        data = request.data
        added_versions = set()
        if 'tags' in data:
            ret_set = api_utils.sync_versions(project=project,
                                              versions=data['tags'],
                                              type=TAG)
            added_versions.update(ret_set)
        if 'branches' in data:
            ret_set = api_utils.sync_versions(project=project,
                                              versions=data['branches'],
                                              type=BRANCH)
            added_versions.update(ret_set)
        deleted_versions = api_utils.delete_versions(project, data)
    except Exception as e:
        log.exception('Sync Versions Error')
        return Response(
            {
                'error': str(e),
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    promoted_version = project.update_stable_version()
    if promoted_version:
        new_stable = project.get_stable_version()
        log.info('Triggering new stable build: {project}:{version}'.format(
            project=project.slug,
            version=new_stable.identifier,
        ))
        trigger_build(project=project, version=new_stable)

        # Marking the tag that is considered the new stable version as
        # active and building it if it was just added.
        if (activate_new_stable and
                promoted_version.slug in added_versions):
            promoted_version.active = True
            promoted_version.save()
            trigger_build(project=project, version=promoted_version)

    return Response({
        'added_versions': added_versions,
        'deleted_versions': deleted_versions,
    })
def save(self, commit=True):
    obj = super().save(commit=commit)
    if obj.active and not obj.built and not obj.uploaded:
        trigger_build(project=obj.project, version=obj)
    return obj
def save(self, *args, **kwargs):
    obj = super(VersionForm, self).save(*args, **kwargs)
    if obj.active and not obj.built and not obj.uploaded:
        trigger_build(project=obj.project, version=obj)
    return obj
def save(self, commit=True):
    """Trigger build on commit save."""
    project = super(ProjectTriggerBuildMixin, self).save(commit)
    if commit:
        trigger_build(project=project)
    return project
def post(self, request, project_slug):
    commit_to_retrigger = None
    project = get_object_or_404(Project, slug=project_slug)

    if not AdminPermission.is_admin(request.user, project):
        return HttpResponseForbidden()

    version_slug = request.POST.get('version_slug')
    build_pk = request.POST.get('build_pk')

    if build_pk:
        # Filter over external versions only when re-triggering a specific build
        version = get_object_or_404(
            Version.external.public(self.request.user),
            slug=version_slug,
            project=project,
        )

        build_to_retrigger = get_object_or_404(
            Build.objects.all(),
            pk=build_pk,
            version=version,
        )
        if build_to_retrigger != Build.objects.filter(version=version).first():
            messages.add_message(
                request,
                messages.ERROR,
                "This build can't be re-triggered because it's "
                "not the latest build for this version.",
            )
            return HttpResponseRedirect(request.path)

        # Set either the build to re-trigger it or None
        if build_to_retrigger:
            commit_to_retrigger = build_to_retrigger.commit
            log.info(
                'Re-triggering build.',
                project_slug=project.slug,
                version_slug=version.slug,
                build_commit=build_to_retrigger.commit,
                build_id=build_to_retrigger.pk,
            )
    else:
        # Use generic query when triggering a normal build
        version = get_object_or_404(
            self._get_versions(project),
            slug=version_slug,
        )

    update_docs_task, build = trigger_build(
        project=project,
        version=version,
        commit=commit_to_retrigger,
    )
    if (update_docs_task, build) == (None, None):
        # Build was skipped
        messages.add_message(
            request,
            messages.WARNING,
            "This project is currently disabled and can't trigger new builds.",
        )
        return HttpResponseRedirect(
            reverse('builds_project_list', args=[project.slug]),
        )

    return HttpResponseRedirect(
        reverse('builds_detail', args=[project.slug, build.pk]),
    )
def sync_versions_task(project_pk, tags_data, branches_data, **kwargs):
    """
    Sync the version data in the repo (from build server) into our database.

    Creates new Version objects for tags/branches that aren't tracked in the
    database, and deletes Version objects for tags/branches that don't exist
    in the repository.

    :param tags_data: List of dictionaries with ``verbose_name`` and
        ``identifier``.
    :param branches_data: Same as ``tags_data`` but for branches.
    :returns: `True` or `False` if the task succeeded.
    """
    project = Project.objects.get(pk=project_pk)

    # If the currently highest non-prerelease version is active, then make
    # the new latest version active as well.
    current_stable = project.get_original_stable_version()
    if current_stable is not None:
        activate_new_stable = current_stable.active
    else:
        activate_new_stable = False

    try:
        # Update All Versions
        added_versions = set()
        result = sync_versions_to_db(
            project=project,
            versions=tags_data,
            type=TAG,
        )
        added_versions.update(result)

        result = sync_versions_to_db(
            project=project,
            versions=branches_data,
            type=BRANCH,
        )
        added_versions.update(result)

        delete_versions_from_db(
            project=project,
            tags_data=tags_data,
            branches_data=branches_data,
        )
        deleted_active_versions = get_deleted_active_versions(
            project=project,
            tags_data=tags_data,
            branches_data=branches_data,
        )
    except Exception:
        log.exception('Sync Versions Error')
        return False

    try:
        # The order of added_versions isn't deterministic.
        # We don't track the commit time or any other metadata.
        # We usually have one version added per webhook.
        run_automation_rules(project, added_versions, deleted_active_versions)
    except Exception:
        # Don't interrupt the request if something goes wrong
        # in the automation rules.
        log.exception(
            'Failed to execute automation rules.',
            project_slug=project.slug,
            versions=added_versions,
        )

    # TODO: move this to an automation rule
    promoted_version = project.update_stable_version()
    new_stable = project.get_stable_version()
    if promoted_version and new_stable and new_stable.active:
        log.info(
            'Triggering new stable build.',
            project_slug=project.slug,
            version_identifier=new_stable.identifier,
        )
        trigger_build(project=project, version=new_stable)

        # Marking the tag that is considered the new stable version as
        # active and building it if it was just added.
        if (
            activate_new_stable and
            promoted_version.slug in added_versions
        ):
            promoted_version.active = True
            promoted_version.save()
            trigger_build(project=project, version=promoted_version)

    return True
def handle(self, *args, **options):
    record = options['record']
    force = options['force']
    version = options['version']
    if options.get('slugs', []):
        for slug in options['slugs']:
            if version and version != 'all':
                log.info('Updating version %s for %s', version, slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        slug=version,
                ):
                    trigger_build(project=version.project, version=version)
            elif version == 'all':
                log.info('Updating all versions for %s', slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    build_pk = None
                    if record:
                        build = Build.objects.create(
                            project=version.project,
                            version=version,
                            type='html',
                            state='triggered',
                        )
                        build_pk = build.pk
                    tasks.UpdateDocsTask().run(
                        pk=version.project_id,
                        build_pk=build_pk,
                        record=record,
                        version_pk=version.pk,
                    )
            else:
                p = Project.all_objects.get(slug=slug)
                log.info('Building %s', p)
                trigger_build(project=p, force=force, record=record)
    else:
        if version == 'all':
            log.info('Updating all versions')
            for version in Version.objects.filter(
                    active=True,
                    uploaded=False,
            ):
                tasks.UpdateDocsTask().run(
                    pk=version.project_id,
                    record=record,
                    force=force,
                    version_pk=version.pk,
                )
        else:
            log.info('Updating all docs')
            for project in Project.objects.all():
                tasks.UpdateDocsTask().run(
                    pk=project.pk,
                    record=record,
                    force=force,
                )
def handle(self, *args, **options):
    force = options['force']
    version = options['version']
    slugs = options.get('slugs', [])

    if slugs:
        for slug in slugs:
            if version == 'all':
                log.info('Updating all versions for %s', slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    trigger_build(project=version.project, version=version)
            elif version == INTERNAL:
                log.info('Updating all internal versions for %s', slug)
                for version in Version.internal.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    trigger_build(project=version.project, version=version)
            elif version == EXTERNAL:
                log.info('Updating all external versions for %s', slug)
                for version in Version.external.filter(
                        project__slug=slug,
                        active=True,
                        uploaded=False,
                ):
                    trigger_build(project=version.project, version=version)
            elif version:
                log.info('Updating version %s for %s', version, slug)
                for version in Version.objects.filter(
                        project__slug=slug,
                        slug=version,
                ):
                    trigger_build(project=version.project, version=version)
            else:
                p = Project.all_objects.get(slug=slug)
                log.info('Building %s', p)
                trigger_build(project=p, force=force)
    else:
        if version == 'all':
            log.info('Updating all versions')
            for version in Version.objects.filter(
                    active=True,
                    uploaded=False,
            ):
                trigger_build(project=version.project, version=version)
        elif version == INTERNAL:
            log.info('Updating all internal versions')
            for version in Version.internal.filter(
                    active=True,
                    uploaded=False,
            ):
                trigger_build(project=version.project, version=version)
        elif version == EXTERNAL:
            log.info('Updating all external versions')
            for version in Version.external.filter(
                    active=True,
                    uploaded=False,
            ):
                trigger_build(project=version.project, version=version)
        elif version:
            log.info('Updating version %s', version)
            for version in Version.objects.filter(slug=version):
                trigger_build(project=version.project, version=version)
        else:
            log.info('Updating all docs')
            for project in Project.objects.all():
                # pylint: disable=no-value-for-parameter
                default_version = project.get_default_version()
                version = project.versions.get(slug=default_version)
                trigger_build(project=version.project, version=version)
class ProjectViewSet(viewsets.ModelViewSet):

    permission_classes = [APIPermission]
    renderer_classes = (JSONRenderer, JSONPRenderer, BrowsableAPIRenderer)
    serializer_class = ProjectSerializer
    filter_class = ProjectFilter
    model = Project
    paginate_by = 100
    paginate_by_param = 'page_size'
    max_paginate_by = 1000

    def get_queryset(self):
        return self.model.objects.api(self.request.user)

    @decorators.detail_route()
    def valid_versions(self, request, **kwargs):
        """Maintain state of versions that are wanted."""
        project = get_object_or_404(Project.objects.api(self.request.user),
                                    pk=kwargs['pk'])
        if not project.num_major or not project.num_minor or not project.num_point:
            return Response(
                {'error': 'Project does not support point version control'},
                status=status.HTTP_400_BAD_REQUEST)
        version_strings = project.supported_versions()
        # Disable making old versions inactive for now.
        # project.versions.exclude(verbose_name__in=version_strings).update(active=False)
        project.versions.filter(verbose_name__in=version_strings).update(
            active=True)
        return Response({
            'flat': version_strings,
        })

    @detail_route()
    def translations(self, request, pk, **kwargs):
        translations = self.get_object().translations.all()
        return Response(
            {'translations': ProjectSerializer(translations, many=True).data})

    @detail_route()
    def subprojects(self, request, **kwargs):
        project = get_object_or_404(Project.objects.api(self.request.user),
                                    pk=kwargs['pk'])
        rels = project.subprojects.all()
        children = [rel.child for rel in rels]
        return Response(
            {'subprojects': ProjectSerializer(children, many=True).data})

    @decorators.detail_route(permission_classes=[permissions.IsAdminUser])
    def token(self, request, **kwargs):
        project = get_object_or_404(Project.objects.api(self.request.user),
                                    pk=kwargs['pk'])
        token = oauth_utils.get_token_for_project(project, force_local=True)
        return Response({'token': token})

    @decorators.detail_route()
    def canonical_url(self, request, **kwargs):
        project = get_object_or_404(Project.objects.api(self.request.user),
                                    pk=kwargs['pk'])
        return Response({'url': project.get_docs_url()})

    @decorators.detail_route(permission_classes=[permissions.IsAdminUser],
                             methods=['post'])
    def sync_versions(self, request, **kwargs):
        """
        Sync the version data in the repo (on the build server) with what we
        have in the database.

        Returns the identifiers for the versions that have been deleted.
        """
        project = get_object_or_404(Project.objects.api(self.request.user),
                                    pk=kwargs['pk'])

        # If the currently highest non-prerelease version is active, then make
        # the new latest version active as well.
        old_highest_version = determine_stable_version(project.versions.all())
        if old_highest_version is not None:
            activate_new_stable = old_highest_version.active
        else:
            activate_new_stable = False

        try:
            # Update All Versions
            data = request.DATA
            added_versions = set()
            if 'tags' in data:
                ret_set = api_utils.sync_versions(project=project,
                                                  versions=data['tags'],
                                                  type=TAG)
                added_versions.update(ret_set)
            if 'branches' in data:
                ret_set = api_utils.sync_versions(project=project,
                                                  versions=data['branches'],
                                                  type=BRANCH)
                added_versions.update(ret_set)
            deleted_versions = api_utils.delete_versions(project, data)
        except Exception as e:
            log.exception("Sync Versions Error: %s" % e.message)
            return Response({'error': e.message},
                            status=status.HTTP_400_BAD_REQUEST)

        try:
            old_stable = project.get_stable_version()
            promoted_version = project.update_stable_version()
            if promoted_version:
                new_stable = project.get_stable_version()
                log.info(
                    "Triggering new stable build: {project}:{version}".format(
                        project=project.slug,
                        version=new_stable.identifier))
                trigger_build(project=project, version=new_stable)

                # Marking the tag that is considered the new stable version as
                # active and building it if it was just added.
                if (activate_new_stable and
                        promoted_version.slug in added_versions):
                    promoted_version.active = True
                    promoted_version.save()
                    trigger_build(project=project, version=promoted_version)
        except:
            log.exception("Stable Version Failure", exc_info=True)

        return Response({
            'added_versions': added_versions,
            'deleted_versions': deleted_versions,
        })
def build_version(self, request, **kwargs):
    project = get_object_or_404(Project, slug=kwargs['project_slug'])
    version = kwargs.get('version_slug', LATEST)
    version_obj = project.versions.get(slug=version)
    trigger_build(project=project, version=version_obj)
    return self.create_response(request, {'building': True})
def save(self, commit=True):
    """Trigger build on commit save."""
    project = super().save(commit)
    if commit:
        trigger_build(project=project)
    return project