def prepare_build(
        project,
        version=None,
        record=True,
        force=False,
        immutable=True,
):
    """
    Prepare a build in a Celery task for project and version.

    If project has a ``build_queue``, execute the task on this build queue. If
    project has ``skip=True``, the build is not triggered.

    :param project: project's documentation to be built
    :param version: version of the project to be built. Default: ``project.get_default_version()``
    :param record: whether or not record the build in a new Build object
    :param force: build the HTML documentation even if the files haven't changed
    :param immutable: whether or not create an immutable Celery signature
    :returns: Celery signature of update_docs_task and Build instance
    :rtype: tuple
    """
    # Avoid circular import
    from readthedocs.builds.models import Build
    from readthedocs.projects.models import Project
    from readthedocs.projects.tasks import update_docs_task

    build = None

    if not Project.objects.is_active(project):
        log.warning(
            'Build not triggered because Project is not active: project=%s',
            project.slug,
        )
        return (None, None)

    if not version:
        default_version = project.get_default_version()
        version = project.versions.get(slug=default_version)

    kwargs = {
        'record': record,
        'force': force,
    }

    if record:
        build = Build.objects.create(
            project=project,
            version=version,
            type='html',
            state=BUILD_STATE_TRIGGERED,
            success=True,
        )
        kwargs['build_pk'] = build.pk

    options = {}
    if project.build_queue:
        options['queue'] = project.build_queue

    # Set per-task time limit; a project-level override wins over the
    # stack-wide Docker default when it parses as an integer.
    time_limit = DOCKER_LIMITS['time']
    try:
        if project.container_time_limit:
            time_limit = int(project.container_time_limit)
    except ValueError:
        log.warning('Invalid time_limit for project: %s', project.slug)

    # Add 20% overhead to task, to ensure the build can timeout and the task
    # will cleanly finish.
    options['soft_time_limit'] = time_limit
    options['time_limit'] = int(time_limit * 1.2)

    return (
        update_docs_task.signature(
            args=(version.pk,),
            kwargs=kwargs,
            options=options,
            # Fix: honor the ``immutable`` argument. It was previously
            # hard-coded to True, silently ignoring the caller's value.
            immutable=immutable,
        ),
        build,
    )
def prepare_build(
        project,
        version=None,
        record=True,
        force=False,
        immutable=True,
):
    """
    Prepare a build in a Celery task for project and version.

    If project has a ``build_queue``, execute the task on this build queue. If
    project has ``skip=True``, the build is not triggered.

    :param project: project's documentation to be built
    :param version: version of the project to be built. Default: ``project.get_default_version()``
    :param record: whether or not record the build in a new Build object
    :param force: build the HTML documentation even if the files haven't changed
    :param immutable: whether or not create an immutable Celery signature
    :returns: Celery signature of update_docs_task and Build instance
    :rtype: tuple
    """
    # Avoid circular import
    from readthedocs.builds.models import Build
    from readthedocs.projects.models import Project
    from readthedocs.projects.tasks import update_docs_task

    build = None

    if not Project.objects.is_active(project):
        log.warning(
            'Build not triggered because Project is not active: project=%s',
            project.slug,
        )
        return (None, None)

    if not version:
        default_version = project.get_default_version()
        version = project.versions.get(slug=default_version)

    kwargs = {
        'version_pk': version.pk,
        'record': record,
        'force': force,
    }

    if record:
        build = Build.objects.create(
            project=project,
            version=version,
            type='html',
            state=BUILD_STATE_TRIGGERED,
            success=True,
        )
        kwargs['build_pk'] = build.pk

    options = {}
    if project.build_queue:
        options['queue'] = project.build_queue

    # Set per-task time limit; a project-level override wins over the
    # stack-wide Docker default when it parses as an integer.
    time_limit = DOCKER_LIMITS['time']
    try:
        if project.container_time_limit:
            time_limit = int(project.container_time_limit)
    except ValueError:
        log.warning('Invalid time_limit for project: %s', project.slug)

    # Add 20% overhead to task, to ensure the build can timeout and the task
    # will cleanly finish.
    options['soft_time_limit'] = time_limit
    options['time_limit'] = int(time_limit * 1.2)

    return (
        update_docs_task.signature(
            # NOTE(review): the task here takes the project pk positionally
            # and the version via ``version_pk`` kwarg — confirm against the
            # current ``update_docs_task`` signature.
            args=(project.pk,),
            kwargs=kwargs,
            options=options,
            # Fix: honor the ``immutable`` argument. It was previously
            # hard-coded to True, silently ignoring the caller's value.
            immutable=immutable,
        ),
        build,
    )
def prepare_build(
        project,
        version=None,
        commit=None,
        record=True,
        force=False,
        immutable=True,
):
    """
    Prepare a build in a Celery task for project and version.

    If project has a ``build_queue``, execute the task on this build queue. If
    project has ``skip=True``, the build is not triggered.

    :param project: project's documentation to be built
    :param version: version of the project to be built. Default: ``project.get_default_version()``
    :param commit: commit sha of the version required for sending build status reports
    :param record: whether or not record the build in a new Build object
    :param force: build the HTML documentation even if the files haven't changed
    :param immutable: whether or not create an immutable Celery signature
    :returns: Celery signature of update_docs_task and Build instance
    :rtype: tuple
    """
    # Avoid circular import
    from readthedocs.builds.models import Build
    from readthedocs.projects.models import Project, Feature
    from readthedocs.projects.tasks import (
        update_docs_task,
        send_external_build_status,
        send_notifications,
    )

    build = None

    if not Project.objects.is_active(project):
        log.warning(
            'Build not triggered because Project is not active: project=%s',
            project.slug,
        )
        return (None, None)

    if not version:
        default_version = project.get_default_version()
        version = project.versions.get(slug=default_version)

    kwargs = {
        'record': record,
        'force': force,
        'commit': commit,
    }

    if record:
        build = Build.objects.create(
            project=project,
            version=version,
            type='html',
            state=BUILD_STATE_TRIGGERED,
            success=True,
            commit=commit,
        )
        kwargs['build_pk'] = build.pk

    options = {}
    if project.build_queue:
        options['queue'] = project.build_queue

    # Set per-task time limit; a project-level override wins over the
    # stack-wide Docker default when it parses as an integer.
    time_limit = DOCKER_LIMITS['time']
    try:
        if project.container_time_limit:
            time_limit = int(project.container_time_limit)
    except ValueError:
        log.warning('Invalid time_limit for project: %s', project.slug)

    # Add 20% overhead to task, to ensure the build can timeout and the task
    # will cleanly finish.
    options['soft_time_limit'] = time_limit
    options['time_limit'] = int(time_limit * 1.2)

    if build and commit:
        # Send pending Build Status using Git Status API for External Builds.
        send_external_build_status(
            version_type=version.type,
            build_pk=build.id,
            commit=commit,
            status=BUILD_STATUS_PENDING,
        )

    if build and version.type != EXTERNAL:
        # Send Webhook notification for build triggered.
        send_notifications.delay(version.pk, build_pk=build.pk, email=False)

    options['priority'] = CELERY_HIGH
    if project.main_language_project:
        # Translations should be medium priority
        options['priority'] = CELERY_MEDIUM
    if version.type == EXTERNAL:
        # External builds should be lower priority.
        options['priority'] = CELERY_LOW

    # Start the build in X minutes and mark it as limited
    if project.has_feature(Feature.LIMIT_CONCURRENT_BUILDS):
        # Builds currently running (neither just triggered nor finished).
        running_builds = (
            Build.objects
            .filter(project__slug=project.slug)
            .exclude(state__in=[BUILD_STATE_TRIGGERED, BUILD_STATE_FINISHED])
        )
        max_concurrent_builds = (
            project.max_concurrent_builds or settings.RTD_MAX_CONCURRENT_BUILDS
        )
        if running_builds.count() >= max_concurrent_builds:
            log.warning(
                'Delaying tasks at trigger step due to concurrency limit. project=%s version=%s',
                project.slug,
                version.slug,
            )
            options['countdown'] = 5 * 60
            options['max_retries'] = 25
            # Fix: guard against ``build`` being None (record=False), which
            # previously raised AttributeError here.
            if build:
                build.error = BuildMaxConcurrencyError.message.format(
                    limit=max_concurrent_builds,
                )
                build.save()

    return (
        update_docs_task.signature(
            args=(version.pk,),
            kwargs=kwargs,
            options=options,
            # Fix: honor the ``immutable`` argument. It was previously
            # hard-coded to True, silently ignoring the caller's value.
            immutable=immutable,
        ),
        build,
    )
def prepare_build(
        project,
        version=None,
        commit=None,
        record=True,
        force=False,
        immutable=True,
):
    """
    Prepare a build in a Celery task for project and version.

    If project has a ``build_queue``, execute the task on this build queue. If
    project has ``skip=True``, the build is not triggered.

    :param project: project's documentation to be built
    :param version: version of the project to be built. Default: ``project.get_default_version()``
    :param commit: commit sha of the version required for sending build status reports
    :param record: whether or not record the build in a new Build object
    :param force: build the HTML documentation even if the files haven't changed
    :param immutable: whether or not create an immutable Celery signature
    :returns: Celery signature of update_docs_task and Build instance
    :rtype: tuple
    """
    # Avoid circular import
    from readthedocs.builds.models import Build
    from readthedocs.projects.models import Project, Feature
    from readthedocs.projects.tasks import (
        update_docs_task,
        send_external_build_status,
        send_notifications,
    )

    build = None

    if not Project.objects.is_active(project):
        log.warning(
            'Build not triggered because Project is not active: project=%s',
            project.slug,
        )
        return (None, None)

    if not version:
        default_version = project.get_default_version()
        version = project.versions.get(slug=default_version)

    kwargs = {
        'record': record,
        'force': force,
        'commit': commit,
    }

    if record:
        build = Build.objects.create(
            project=project,
            version=version,
            type='html',
            state=BUILD_STATE_TRIGGERED,
            success=True,
            commit=commit,
        )
        kwargs['build_pk'] = build.pk

    options = {}
    if project.build_queue:
        options['queue'] = project.build_queue

    # Set per-task time limit
    # TODO remove the use of Docker limits or replace the logic here. This
    # was pulling the Docker limits that were set on each stack, but we moved
    # to dynamic setting of the Docker limits. This sets a failsafe higher
    # limit, but if no builds hit this limit, it should be safe to remove and
    # rely on Docker to terminate things on time.
    # time_limit = DOCKER_LIMITS['time']
    time_limit = 7200
    try:
        if project.container_time_limit:
            time_limit = int(project.container_time_limit)
    except ValueError:
        log.warning('Invalid time_limit for project: %s', project.slug)

    # Add 20% overhead to task, to ensure the build can timeout and the task
    # will cleanly finish.
    options['soft_time_limit'] = time_limit
    options['time_limit'] = int(time_limit * 1.2)

    if build and commit:
        # Send pending Build Status using Git Status API for External Builds.
        send_external_build_status(
            version_type=version.type,
            build_pk=build.id,
            commit=commit,
            status=BUILD_STATUS_PENDING,
        )

    if build and version.type != EXTERNAL:
        # Send Webhook notification for build triggered.
        send_notifications.delay(version.pk, build_pk=build.pk, email=False)

    options['priority'] = CELERY_HIGH
    if project.main_language_project:
        # Translations should be medium priority
        options['priority'] = CELERY_MEDIUM
    if version.type == EXTERNAL:
        # External builds should be lower priority.
        options['priority'] = CELERY_LOW

    # Deduplication: detect whether an equivalent build is already queued.
    # Fix: only computed when ``build`` exists — with ``record=False`` the
    # previous code dereferenced ``build.pk``/``build.error`` on None.
    skip_build = False
    if build:
        if commit:
            skip_build = (
                Build.objects
                .filter(
                    project=project,
                    version=version,
                    commit=commit,
                )
                .exclude(
                    state=BUILD_STATE_FINISHED,
                )
                .exclude(
                    pk=build.pk,
                )
                .exists()
            )
        else:
            skip_build = Build.objects.filter(
                project=project,
                version=version,
                state=BUILD_STATE_TRIGGERED,
            ).count() > 1

    if not project.has_feature(Feature.DEDUPLICATE_BUILDS):
        log.debug(
            'Skipping deduplication of builds. Feature not enabled. project=%s',
            project.slug,
        )
        skip_build = False

    if skip_build:
        # TODO: we could mark the old build as duplicated, however we reset our
        # position in the queue and go back to the end of it --penalization
        log.warning(
            'Marking build to be skipped by builder. project=%s version=%s build=%s commit=%s',
            project.slug,
            version.slug,
            build.pk,
            commit,
        )
        build.error = DuplicatedBuildError.message
        build.status = DuplicatedBuildError.status
        build.exit_code = DuplicatedBuildError.exit_code
        build.success = False
        build.state = BUILD_STATE_FINISHED
        build.save()

    # Start the build in X minutes and mark it as limited
    if not skip_build and project.has_feature(Feature.LIMIT_CONCURRENT_BUILDS):
        limit_reached, _, max_concurrent_builds = Build.objects.concurrent(
            project)
        if limit_reached:
            log.warning(
                'Delaying tasks at trigger step due to concurrency limit. project=%s version=%s',
                project.slug,
                version.slug,
            )
            options['countdown'] = 5 * 60
            options['max_retries'] = 25
            # Fix: guard against ``build`` being None (record=False), which
            # previously raised AttributeError here.
            if build:
                build.error = BuildMaxConcurrencyError.message.format(
                    limit=max_concurrent_builds,
                )
                build.save()

    return (
        update_docs_task.signature(
            args=(version.pk,),
            kwargs=kwargs,
            options=options,
            # Fix: honor the ``immutable`` argument. It was previously
            # hard-coded to True, silently ignoring the caller's value.
            immutable=immutable,
        ),
        build,
    )