def run_setup(self, record=True):
    """
    Run setup in the local environment.

    Creates ``self.setup_env`` (a non-docker ``LocalBuildEnvironment``),
    checks out the repository, loads the YAML config and saves it on the
    build.  Setup failures are collected on ``self.setup_env.failure``
    rather than propagating out of the ``with`` block.

    :param record: whether to record the commands in the database
    :returns: True if successful, False when any setup step failed
    """
    self.setup_env = LocalBuildEnvironment(
        project=self.project,
        version=self.version,
        build=self.build,
        record=record,
        update_on_success=False,
    )

    # Environment used for code checkout & initial configuration reading
    with self.setup_env:
        if self.project.skip:
            raise ProjectBuildsSkippedError
        try:
            self.setup_vcs()
        except vcs_support_utils.LockTimeout as e:
            # Re-queue the task without raising, then mark this run as locked
            self.task.retry(exc=e, throw=False)
            raise VersionLockedError
        try:
            self.config = load_yaml_config(version=self.version)
        except ConfigError as e:
            raise YAMLParseError(
                YAMLParseError.GENERIC_WITH_PARSE_EXCEPTION.format(
                    exception=str(e),
                ),
            )

        self.save_build_config()
        self.additional_vcs_operations()

    if self.setup_env.failure or self.config is None:
        msg = 'Failing build because of setup failure: {}'.format(
            self.setup_env.failure,
        )
        log.info(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg=msg,
            ),
        )

        # Send notification to users only if the build didn't fail because
        # of VersionLockedError: this exception occurs when a build is
        # triggered before the previous one has finished (e.g. two webhooks,
        # one after the other)
        if not isinstance(self.setup_env.failure, VersionLockedError):
            self.send_notifications()

        return False

    if self.setup_env.successful and not self.project.has_valid_clone:
        self.set_valid_clone()

    return True
def test_record_command_as_success(self, api_v2):
    """``record_as_success=True`` stores a failing command with exit code 0."""
    project = get(Project)
    build_env = LocalBuildEnvironment(
        project=project,
        build={
            'id': 1,
        },
    )

    with build_env:
        # ``false`` exits non-zero, but record_as_success masks the failure
        build_env.run('false', record_as_success=True)
    self.assertEqual(len(build_env.commands), 1)

    # The recorded command must report success both locally and in the
    # payload POSTed to the API.
    command = build_env.commands[0]
    self.assertEqual(command.exit_code, 0)
    api_v2.command.post.assert_called_once_with({
        'build': mock.ANY,
        'command': command.get_command(),
        'output': command.output,
        'exit_code': 0,
        'start_time': command.start_time,
        'end_time': command.end_time,
    })
def test_command_not_recorded(self, api_v2):
    """With ``record=False`` a command is neither stored nor POSTed to the API."""
    environment = LocalBuildEnvironment()

    with environment:
        environment.run('true', record=False)

    # Nothing kept locally, nothing sent to the API.
    self.assertEqual(len(environment.commands), 0)
    api_v2.command.post.assert_not_called()
def test_incremental_state_update_with_no_update(self):
    """
    Intermediate state updates report success regardless of update_on_success.

    NOTE(review): the original docstring mentioned only
    ``update_on_success=True``; the test exercises both the default and
    ``update_on_success=False`` environments — confirm intent.
    """
    build_envs = [
        LocalBuildEnvironment(
            version=self.version,
            project=self.project,
            build={'id': DUMMY_BUILD_ID},
        ),
        LocalBuildEnvironment(
            version=self.version,
            project=self.project,
            build={'id': DUMMY_BUILD_ID},
            update_on_success=False,
        ),
    ]

    for build_env in build_envs:
        with build_env:
            build_env.update_build(BUILD_STATE_CLONING)
            # A non-finished state update still reports success=True
            self.mocks.mocks['api_v2.build']().put.assert_called_with({
                'id': DUMMY_BUILD_ID,
                'version': self.version.pk,
                'success': True,
                'project': self.project.pk,
                'setup_error': u'',
                'length': mock.ANY,
                'error': '',
                'setup': u'',
                'output': u'',
                'state': BUILD_STATE_CLONING,
                'builder': mock.ANY,
                'exit_code': 0,
            })
def run_setup(self, record=True):
    """
    Run setup in the local environment.

    :param record: whether to record the commands in the database
    :returns: True if successful, False when any setup step failed
    """
    self.setup_env = LocalBuildEnvironment(
        project=self.project,
        version=self.version,
        build=self.build,
        record=record,
        update_on_success=False,
    )

    # Environment used for code checkout & initial configuration reading
    with self.setup_env:
        if self.project.skip:
            raise BuildEnvironmentError(
                _('Builds for this project are temporarily disabled'))
        try:
            self.setup_vcs()
        except vcs_support_utils.LockTimeout as e:
            # NOTE(review): assumes this object is the Celery task itself
            # (``self.retry``); the TaskStep variant elsewhere uses
            # ``self.task.retry`` — confirm which applies here.
            self.retry(exc=e, throw=False)
            raise BuildEnvironmentError(
                'Version locked, retrying in 5 minutes.',
                status_code=423
            )

        try:
            self.config = load_yaml_config(version=self.version)
        except ConfigError as e:
            raise BuildEnvironmentError(
                'Problem parsing YAML configuration. {0}'.format(str(e))
            )

    if self.setup_env.failure or self.config is None:
        self._log('Failing build because of setup failure: %s' %
                  self.setup_env.failure)

        # Send notification to users only if the build didn't fail because of
        # LockTimeout: this exception occurs when a build is triggered before
        # the previous one has finished (e.g. two webhooks, one after the
        # other)
        if not isinstance(self.setup_env.failure,
                          vcs_support_utils.LockTimeout):
            self.send_notifications()

        return False

    if self.setup_env.successful and not self.project.has_valid_clone:
        self.set_valid_clone()

    return True
def test_failing_execution(self):
    """Build in failing state: a non-zero command aborts the environment."""
    self.mocks.configure_mock(
        'process',
        {'communicate.return_value': (b'This is not okay', '')})
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    build_env = LocalBuildEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )

    with build_env:
        build_env.run('echo', 'test')
        # The failing run above raises out of the with block, so this line
        # must never execute.
        self.fail('This should be unreachable')
    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, u'This is not okay')

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': '',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def test_build_respects_yaml(self):
    """YAML ``formats`` setting selects the output formats to build."""
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=False,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    # YAML config requests only 'epub' as extra format
    config = ConfigWrapper(version=version,
                           yaml_config=create_load({'formats': ['epub']})()[0])
    task = UpdateDocsTaskStep(build_env=build_env, project=project,
                              python_env=python_env, version=version,
                              search=False, localmedia=False, config=config)

    task.build_docs()

    # The HTML and the Epub format were built.
    self.mocks.html_build.assert_called_once_with()
    self.mocks.epub_build.assert_called_once_with()
    # PDF however was disabled and therefore not built.
    self.assertFalse(self.mocks.pdf_build.called)
def test_dont_localmedia_build_pdf_epub_search_in_mkdocs(
        self, load_config):
    """Mkdocs projects build only HTML — no localmedia/pdf/epub outputs."""
    load_config.side_effect = create_load()
    project = get(
        Project,
        slug='project-1',
        documentation_type='mkdocs',
        enable_pdf_build=True,
        enable_epub_build=True,
        versions=[fixture()])
    version = project.versions.all().first()

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = load_yaml_config(version)
    task = UpdateDocsTaskStep(
        build_env=build_env, project=project, python_env=python_env,
        version=version, config=config)

    task.build_docs()

    # Only html for mkdocs was built; even with pdf/epub enabled on the
    # project, mkdocs does not support those formats.
    self.mocks.html_build_mkdocs.assert_called_once()
    self.mocks.html_build.assert_not_called()
    self.mocks.localmedia_build.assert_not_called()
    self.mocks.pdf_build.assert_not_called()
    self.mocks.epub_build.assert_not_called()
def test_build(self, load_config):
    """Full build: the real html build step must invoke sphinx-build."""
    load_config.side_effect = create_load()
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  versions=[fixture()])
    version = project.versions.all()[0]
    self.mocks.configure_mock('api_versions', {'return_value': [version]})
    self.mocks.configure_mock(
        'api',
        {'get.return_value': {
            'downloads': "no_url_here"
        }})
    # Unpatch the html build so the real command is assembled below
    self.mocks.patches['html_build'].stop()

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = load_yaml_config(version)
    task = UpdateDocsTaskStep(build_env=build_env, project=project,
                              python_env=python_env, version=version,
                              config=config)
    task.build_docs()

    # Get command and check first part of command list is a call to sphinx
    self.assertEqual(self.mocks.popen.call_count, 3)
    cmd = self.mocks.popen.call_args_list[2][0]
    self.assertRegex(cmd[0][0], r'python')
    self.assertRegex(cmd[0][1], r'sphinx-build')
def test_failing_execution_with_unexpected_exception(self):
    """Build in failing state with an uncaught exception from user code."""
    build_env = LocalBuildEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )

    with build_env:
        raise ValueError('uncaught')

    # No command ran, so the subprocess mock was never touched
    self.assertFalse(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # An unexpected exception is reported to the user with a generic
    # message that includes the build id.
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': (
            'There was a problem with Read the Docs while building your '
            'documentation. Please report this to us with your build id (123).'
        ),
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
    })
def test_failing_execution_with_caught_exception(self):
    """Build in failing state with a ``BuildEnvironmentError`` exception."""
    build_env = LocalBuildEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )

    with build_env:
        raise BuildEnvironmentError('Foobar')

    self.assertFalse(self.mocks.process.communicate.called)
    self.assertEqual(len(build_env.commands), 0)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # A known build error surfaces its own message verbatim
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': 'Foobar',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def test_build_respects_pdf_flag(self, load_config):
    """Build output format control: project PDF flag enables the PDF build."""
    load_config.side_effect = create_load()
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=True,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = load_yaml_config(version)
    task = UpdateDocsTaskStep(build_env=build_env, project=project,
                              python_env=python_env, version=version,
                              config=config)

    task.build_docs()

    # The HTML and the PDF formats were built.
    self.mocks.html_build.assert_called_once_with()
    self.mocks.pdf_build.assert_called_once_with()
    # Epub however was disabled and therefore not built.
    self.assertFalse(self.mocks.epub_build.called)
def setUp(self):
    """Create an mkdocs project/version pair and a non-recording build env."""
    self.project = get(Project, documentation_type='mkdocs', name='mkdocs')
    self.version = get(Version, project=self.project)

    # Attach project/version after construction, mirroring how the
    # builders expect them on the environment.
    env = LocalBuildEnvironment(record=False)
    env.project = self.project
    env.version = self.version
    self.build_env = env
def test_builder_no_comments(self):
    """With comments disabled the Sphinx builder is the plain 'readthedocs' one."""
    project = get(Project,
                  documentation_type='sphinx',
                  allow_comments=False,
                  versions=[fixture()])
    version = project.versions.all()[0]

    build_env = LocalBuildEnvironment(version=version, project=project,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    builder_class = get_builder_class(project.documentation_type)
    builder = builder_class(build_env, python_env)
    self.assertEqual(builder.sphinx_builder, 'readthedocs')
def get_update_docs_task(self):
    """Return an ``UpdateDocsTaskStep`` wired to a non-recording local env."""
    env = LocalBuildEnvironment(
        self.project,
        self.version,
        record=False,
    )
    return tasks.UpdateDocsTaskStep(
        build_env=env,
        config=load_yaml_config(self.version),
        project=self.project,
        version=self.version,
    )
def test_build_pdf_latex_not_failure(self, load_config):
    """Test pass during PDF builds and bad latex failure status code."""
    load_config.side_effect = create_load()
    # Unpatch html/pdf builds so the real commands run through Popen mocks
    self.mocks.patches['html_build'].stop()
    self.mocks.patches['pdf_build'].stop()

    project = get(
        Project,
        slug='project-2',
        documentation_type='sphinx',
        conf_py_file='test_conf.py',
        enable_pdf_build=True,
        enable_epub_build=False,
        versions=[fixture()],
    )
    version = project.versions.all()[0]
    assert project.conf_dir() == '/tmp/rtd'

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = load_yaml_config(version)
    task = UpdateDocsTaskStep(
        build_env=build_env,
        project=project,
        python_env=python_env,
        version=version,
        config=config,
    )

    # Mock out the separate calls to Popen using an iterable side_effect
    returns = [
        ((b'', b''), 0),  # sphinx-build html
        ((b'', b''), 0),  # sphinx-build pdf
        ((b'', b''), 1),  # sphinx version check
        # latex exits non-zero but the output shows the PDF was written,
        # so the build is still considered successful
        ((b'Output written on foo.pdf', b''), 1),  # latex
        ((b'', b''), 0),  # makeindex
        ((b'', b''), 0),  # latex
    ]
    mock_obj = mock.Mock()
    mock_obj.communicate.side_effect = [
        output for (output, status) in returns
    ]
    type(mock_obj).returncode = mock.PropertyMock(
        side_effect=[status for (output, status) in returns],
    )
    self.mocks.popen.return_value = mock_obj

    with build_env:
        task.build_docs()
    self.assertEqual(self.mocks.popen.call_count, 6)
    self.assertTrue(build_env.successful)
def test_failing_execution(self):
    """Build in failing state: a non-zero command aborts the environment."""
    self.mocks.configure_mock('process', {
        'communicate.return_value': (b'This is not okay', '')
    })
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    build_env = LocalBuildEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )

    with build_env:
        build_env.run('echo', 'test')
        # The failing run above raises out of the with block, so this line
        # must never execute.
        self.fail('This should be unreachable')
    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, u'This is not okay')

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': '',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def run_setup(self, record=True):
    """
    Run setup in the local environment.

    :param record: whether to record the commands in the database
    :returns: True if successful, False when any setup step failed
    """
    self.setup_env = LocalBuildEnvironment(
        project=self.project,
        version=self.version,
        build=self.build,
        record=record,
        update_on_success=False,
    )

    # Environment used for code checkout & initial configuration reading
    with self.setup_env:
        if self.project.skip:
            raise BuildEnvironmentError(
                _('Builds for this project are temporarily disabled'))
        try:
            self.setup_vcs()
        except vcs_support_utils.LockTimeout as e:
            # Re-queue the task via the wrapping Celery task, then mark
            # this run as locked (HTTP 423)
            self.task.retry(exc=e, throw=False)
            raise BuildEnvironmentError(
                'Version locked, retrying in 5 minutes.',
                status_code=423
            )

        try:
            self.config = load_yaml_config(version=self.version)
        except ConfigError as e:
            raise BuildEnvironmentError(
                'Problem parsing YAML configuration. {0}'.format(str(e))
            )

    if self.setup_env.failure or self.config is None:
        self._log('Failing build because of setup failure: %s' %
                  self.setup_env.failure)

        # Send notification to users only if the build didn't fail because of
        # LockTimeout: this exception occurs when a build is triggered before
        # the previous one has finished (e.g. two webhooks, one after the
        # other)
        if not isinstance(self.setup_env.failure,
                          vcs_support_utils.LockTimeout):
            self.send_notifications()

        return False

    if self.setup_env.successful and not self.project.has_valid_clone:
        self.set_valid_clone()

    return True
def test_build_pdf_latex_failures(self):
    """Build failure if latex fails without producing a PDF."""
    # Unpatch html/pdf builds so the real commands run through Popen mocks
    self.mocks.patches['html_build'].stop()
    self.mocks.patches['pdf_build'].stop()
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=True,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]
    assert project.conf_dir() == '/tmp/rtd'

    build_env = LocalBuildEnvironment(project=project, version=version,
                                      build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = ConfigWrapper(version=version, yaml_config=create_load()()[0])
    task = UpdateDocsTaskStep(build_env=build_env, project=project,
                              python_env=python_env, version=version,
                              search=False, localmedia=False, config=config)

    # Mock out the separate calls to Popen using an iterable side_effect
    returns = [
        ((b'', b''), 0),  # sphinx-build html
        ((b'', b''), 0),  # sphinx-build pdf
        ((b'', b''), 1),  # latex
        ((b'', b''), 0),  # makeindex
        ((b'', b''), 0),  # latex
    ]
    mock_obj = mock.Mock()
    mock_obj.communicate.side_effect = [
        output for (output, status) in returns
    ]
    type(mock_obj).returncode = mock.PropertyMock(
        side_effect=[status for (output, status) in returns])
    self.mocks.popen.return_value = mock_obj

    with build_env:
        task.build_docs()
    self.assertEqual(self.mocks.popen.call_count, 7)
    self.assertTrue(build_env.failed)
def get_update_docs_task(self, version):
    """Return an ``UpdateDocsTaskStep`` for *version* with a freshly triggered build."""
    env = LocalBuildEnvironment(version.project, version, record=False)
    build_info = {
        'id': 99,
        'state': BUILD_STATE_TRIGGERED,
    }
    return tasks.UpdateDocsTaskStep(
        build_env=env,
        project=version.project,
        version=version,
        build=build_info,
    )
def get_update_docs_task(self):
    """Return an ``UpdateDocsTaskStep`` for this project/version with a triggered build."""
    env = LocalBuildEnvironment(self.project, self.version, record=False)
    build_info = {
        'id': 99,
        'state': BUILD_STATE_TRIGGERED,
    }
    return tasks.UpdateDocsTaskStep(
        build_env=env,
        config=load_yaml_config(self.version),
        project=self.project,
        version=self.version,
        build=build_info,
    )
def __init__(
        self,
        project,
        version_slug,
        environment=None,
        verbose_name=None,
        version_type=None,
        **kwargs
):
    """
    Store repository/version metadata and choose an execution environment.

    :param project: project whose repository is being checked out
    :param version_slug: slug of the version to check out
    :param environment: build environment used to run VCS commands; when
        ``None`` a non-recording ``LocalBuildEnvironment`` is created
    :param verbose_name: verbose version name (used for external versions)
    :param version_type: version type (used for external versions)
    """
    self.default_branch = project.default_branch
    self.project = project
    self.name = project.name
    self.repo_url = project.clean_repo
    self.working_dir = project.checkout_path(version_slug)
    # required for External versions
    self.verbose_name = verbose_name
    self.version_type = version_type

    # TODO: always pass an explicit environment
    # This is only used in tests #6546
    from readthedocs.doc_builder.environments import LocalBuildEnvironment
    self.environment = environment or LocalBuildEnvironment(record=False)

    # Update the env variables with the proper VCS env variables
    self.environment.environment.update(self.env)
def __init__(self, project, version_slug, environment=None,
             verbose_name=None, version_type=None, **kwargs):
    """
    Store repository/version metadata and choose an execution environment.

    :param project: project whose repository is being checked out
    :param version_slug: slug of the version to check out
    :param environment: build environment used to run VCS commands; when
        ``None`` a recording ``LocalBuildEnvironment`` is created
    :param verbose_name: verbose version name (used for external versions)
    :param version_type: version type (used for external versions)
    """
    self.default_branch = project.default_branch
    self.project = project
    self.name = project.name
    self.repo_url = project.clean_repo
    self.working_dir = project.checkout_path(version_slug)
    # required for External versions
    self.verbose_name = verbose_name
    self.version_type = version_type

    # TODO: always pass an explicit environment
    # This is only used in tests #6546
    #
    # TODO: we should not allow ``environment=None`` and always use the
    # environment defined by the settings
    from readthedocs.doc_builder.environments import LocalBuildEnvironment
    self.environment = environment or LocalBuildEnvironment()
class UpdateDocsTask(SyncRepositoryMixin, Task):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported, we
    created it or a webhook is received. Then it will sync the repository
    and build the html docs if needed.
    """

    # Celery task configuration
    max_retries = 5
    default_retry_delay = (7 * 60)
    name = __name__ + '.update_docs'

    # TODO: the arguments of __init__ are used only in tests
    def __init__(self, build_env=None, python_env=None, config=None,
                 force=False, search=True, localmedia=True,
                 build=None, project=None, version=None):
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        self.build_search = search
        self.build_localmedia = localmedia
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        if config is not None:
            self.config = config

    def _log(self, msg):
        """Log ``msg`` tagged with this task's project and version slugs."""
        log.info(LOG_TEMPLATE
                 .format(project=self.project.slug,
                         version=self.version.slug,
                         msg=msg))

    # pylint: disable=arguments-differ
    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=None, search=True, force=False, localmedia=True, **__):
        """
        Run a documentation sync n' build.

        This is fully wrapped in exception handling to account for a number
        of failure cases. We first run a few commands in a local build
        environment, but do not report on environment success. This avoids
        a flicker on the build output page where the build is marked as
        finished in between the local environment steps and the docker
        build steps.

        If a failure is raised, or the build is not successful, return
        ``False``, otherwise, ``True``.

        Unhandled exceptions raise a generic user facing error, which
        directs the user to bug us. It is therefore a benefit to have as
        few unhandled errors as possible.

        :param pk int: Project id
        :param version_pk int: Project Version id (latest if None)
        :param build_pk int: Build id (if None, commands are not recorded)
        :param record bool: record a build object in the database
        :param docker bool: use docker to build the project (if ``None``,
            ``settings.DOCKER_ENABLE`` is used)
        :param search bool: update search
        :param force bool: force Sphinx build
        :param localmedia bool: update localmedia

        :returns: whether build was successful or not

        :rtype: bool
        """
        try:
            if docker is None:
                docker = settings.DOCKER_ENABLE
            self.project = self.get_project(pk)
            self.version = self.get_version(self.project, version_pk)
            self.build = self.get_build(build_pk)
            self.build_search = search
            self.build_localmedia = localmedia
            self.build_force = force
            self.config = None

            setup_successful = self.run_setup(record=record)
            if not setup_successful:
                return False

        # Catch unhandled errors in the setup step
        except Exception as e:  # noqa
            log.exception(
                'An unhandled exception was raised during build setup',
                extra={'tags': {'build': build_pk}}
            )
            self.setup_env.failure = BuildEnvironmentError(
                BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                    build_id=build_pk,
                )
            )
            self.setup_env.update_build(BUILD_STATE_FINISHED)
            return False
        else:
            # No exceptions in the setup step, catch unhandled errors in the
            # build steps
            try:
                self.run_build(docker=docker, record=record)
            except Exception as e:  # noqa
                log.exception(
                    'An unhandled exception was raised during project build',
                    extra={'tags': {'build': build_pk}}
                )
                self.build_env.failure = BuildEnvironmentError(
                    BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                        build_id=build_pk,
                    )
                )
                self.build_env.update_build(BUILD_STATE_FINISHED)
                return False

        return True

    def run_setup(self, record=True):
        """
        Run setup in the local environment.

        :param record: whether to record the commands in the database
        :returns: True if successful, False when any setup step failed
        """
        self.setup_env = LocalBuildEnvironment(
            project=self.project,
            version=self.version,
            build=self.build,
            record=record,
            update_on_success=False,
        )

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise BuildEnvironmentError(
                    _('Builds for this project are temporarily disabled'))
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # ``self.retry`` is the Celery Task API — re-queue without
                # raising, then fail this run with HTTP 423 (Locked)
                self.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423
                )

            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise BuildEnvironmentError(
                    'Problem parsing YAML configuration. {0}'.format(str(e))
                )

        if self.setup_env.failure or self.config is None:
            self._log('Failing build because of setup failure: %s' %
                      self.setup_env.failure)

            # Send notification to users only if the build didn't fail
            # because of LockTimeout: this exception occurs when a build is
            # triggered before the previous one has finished (e.g. two
            # webhooks, one after the other)
            if not isinstance(self.setup_env.failure,
                              vcs_support_utils.LockTimeout):
                self.send_notifications()

            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def run_build(self, docker, record):
        """
        Build the docs in an environment.

        :param docker: if ``True``, the build uses a
            ``DockerBuildEnvironment``, otherwise it uses a
            ``LocalBuildEnvironment`` to run all the commands to build the
            docs
        :param record: whether or not record all the commands in the
            ``Build`` instance
        """
        env_vars = self.get_env_vars()

        if docker:
            env_cls = DockerBuildEnvironment
        else:
            env_cls = LocalBuildEnvironment
        self.build_env = env_cls(project=self.project, version=self.version,
                                 config=self.config, build=self.build,
                                 record=record, environment=env_vars)

        # Environment used for building code, usually with Docker
        with self.build_env:

            if self.project.documentation_type == 'auto':
                self.update_documentation_type()

            python_env_cls = Virtualenv
            if self.config.use_conda:
                self._log('Using conda')
                python_env_cls = Conda
            self.python_env = python_env_cls(version=self.version,
                                             build_env=self.build_env,
                                             config=self.config)

            try:
                self.setup_python_environment()

                # TODO the build object should have an idea of these states,
                # extend the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except SoftTimeLimitExceeded:
                raise BuildEnvironmentError(_('Build exited due to time out'))

            # Finalize build and update web servers
            if build_id:
                self.update_app_instances(
                    html=bool(outcomes['html']),
                    search=bool(outcomes['search']),
                    localmedia=bool(outcomes['localmedia']),
                    pdf=bool(outcomes['pdf']),
                    epub=bool(outcomes['epub']),
                )
            else:
                log.warning('No build ID, not syncing files')

        if self.build_env.failed:
            self.send_notifications()

        build_complete.send(sender=Build, build=self.build_env.build)

    @staticmethod
    def get_project(project_pk):
        """Get project from API."""
        project_data = api_v2.project(project_pk).get()
        return APIProject(**project_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API.

        :param build_pk: Build primary key
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Strip fields that are not plain build data before returning
        return dict((key, val) for (key, val) in list(build.items())
                    if key not in ['project', 'version', 'resource_uri',
                                   'absolute_uri'])

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.

        :param build_env: Build environment
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)

        self._log(msg='Updating docs from VCS')
        self.sync_repo()
        commit = self.project.vcs_repo(self.version.slug).commit
        if commit:
            self.build['commit'] = commit

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug
        }

        if self.config.use_conda:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path,
                                                'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(self.project.doc_path, 'conda',
                                         self.version.slug, 'bin')
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(self.project.doc_path, 'envs',
                                         self.version.slug, 'bin')
            })

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        self.project.has_valid_clone = True

    def update_documentation_type(self):
        """
        Force Sphinx for 'auto' documentation type.

        This used to determine the type and automatically set the
        documentation type to Sphinx for rST and Mkdocs for markdown. It
        now just forces Sphinx, due to markdown support.
        """
        ret = 'sphinx'
        project_data = api_v2.project(self.project.pk).get()
        project_data['documentation_type'] = ret
        api_v2.project(self.project.pk).put(project_data)
        self.project.documentation_type = ret

    def update_app_instances(self, html=False, localmedia=False, search=False,
                             pdf=False, epub=False):
        """
        Update application instances with build artifacts.

        This triggers updates across application instances for html, pdf,
        epub, downloads, and search. Tasks are broadcast to all web servers
        from here.
        """
        # Update version if we have successfully built HTML output
        try:
            if html:
                version = api_v2.version(self.version.pk)
                version.patch({
                    'active': True,
                    'built': True,
                })
        except HttpClientError:
            log.exception(
                'Updating version failed, skipping file sync: version=%s',
                self.version,
            )

        # Broadcast finalization steps to web application instances
        broadcast(
            type='app',
            task=sync_files,
            args=[
                self.project.pk,
                self.version.pk,
            ],
            kwargs=dict(
                hostname=socket.gethostname(),
                html=html,
                localmedia=localmedia,
                search=search,
                pdf=pdf,
                epub=epub,
            ),
            callback=sync_callback.s(version_pk=self.version.pk,
                                     commit=self.build['commit']),
        )

    def setup_python_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.

        :param build_env: Build environment to pass commands and execution
            through.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):

            # Check if the python version/build image in the current venv is
            # the same to be used in this build and if it differs, wipe the
            # venv to avoid conflicts.
            if self.python_env.is_obsolete:
                self.python_env.delete_existing_venv_dir()
            else:
                self.python_env.delete_existing_build_dir()

            self.python_env.setup_base()
            self.python_env.save_environment_json()
            self.python_env.install_core_requirements()
            self.python_env.install_user_requirements()
            self.python_env.install_package()

    def build_docs(self):
        """
        Wrapper to all build functions.

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia,
            pdf, and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)
        return outcomes

    def build_docs_html(self):
        """Build HTML docs."""
        html_builder = get_builder_class(self.project.documentation_type)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(type='app', task=move_files,
                      args=[self.version.pk, socket.gethostname()],
                      kwargs=dict(html=True)
                      )
        except socket.error:
            log.exception('move_files task has failed on socket error.')

        return success

    def build_docs_search(self):
        """Build search data with separate build."""
        if self.build_search and self.project.is_type_sphinx:
            return self.build_docs_class('sphinx_search')
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build."""
        if 'htmlzip' not in self.config.formats:
            return False

        if self.build_localmedia:
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs."""
        if ('pdf' not in self.config.formats or
                self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_pdf')

    def build_docs_epub(self):
        """Build ePub docs."""
        if ('epub' not in self.config.formats or
                self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_epub')

    def build_docs_class(self, builder_class):
        """
        Build docs with additional doc backends.

        These steps are not necessarily required for the build to halt, so
        we only raise a warning exception here. A hard error will halt the
        build process.
        """
        builder = get_builder_class(builder_class)(self.build_env,
                                                   python_env=self.python_env)
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure."""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])
class UpdateDocsTaskStep(SyncRepositoryMixin):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported, we
    created it or a webhook is received. Then it will sync the repository and
    build the html docs if needed.

    .. note::
        This is implemented as a separate class to isolate each run of the
        underlying task. Previously, we were using a custom ``celery.Task`` for
        this, but this class is only instantiated once -- on startup. The effect
        was that this instance shared state between workers.
    """

    def __init__(self, build_env=None, python_env=None, config=None,
                 force=False, search=True, localmedia=True, build=None,
                 project=None, version=None, task=None):
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        self.build_search = search
        self.build_localmedia = localmedia
        # ``build``/``version``/``project`` default to empty dict placeholders
        # so attribute access on them before ``run()`` fills them in is at
        # least predictable.
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        if config is not None:
            self.config = config
        self.task = task
        # Always define the attribute so the error handlers in ``run`` can
        # test it safely even when ``run_setup`` never got far enough to
        # create the environment (e.g. fetching the project from the API
        # raised first).
        self.setup_env = None

    def _log(self, msg):
        """Log ``msg`` tagged with the current project and version slugs."""
        log.info(LOG_TEMPLATE
                 .format(project=self.project.slug,
                         version=self.version.slug,
                         msg=msg))

    # pylint: disable=arguments-differ
    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=None, search=True, force=False, localmedia=True, **__):
        """
        Run a documentation sync n' build.

        This is fully wrapped in exception handling to account for a number of
        failure cases. We first run a few commands in a local build environment,
        but do not report on environment success. This avoids a flicker on the
        build output page where the build is marked as finished in between the
        local environment steps and the docker build steps.

        If a failure is raised, or the build is not successful, return
        ``False``, otherwise, ``True``.

        Unhandled exceptions raise a generic user facing error, which directs
        the user to bug us. It is therefore a benefit to have as few unhandled
        errors as possible.

        :param pk int: Project id
        :param version_pk int: Project Version id (latest if None)
        :param build_pk int: Build id (if None, commands are not recorded)
        :param record bool: record a build object in the database
        :param docker bool: use docker to build the project (if ``None``,
            ``settings.DOCKER_ENABLE`` is used)
        :param search bool: update search
        :param force bool: force Sphinx build
        :param localmedia bool: update localmedia
        :returns: whether build was successful or not
        :rtype: bool
        """
        try:
            if docker is None:
                docker = settings.DOCKER_ENABLE
            self.project = self.get_project(pk)
            self.version = self.get_version(self.project, version_pk)
            self.build = self.get_build(build_pk)
            self.build_search = search
            self.build_localmedia = localmedia
            self.build_force = force
            self.config = None

            setup_successful = self.run_setup(record=record)
            if not setup_successful:
                return False

        # Catch unhandled errors in the setup step
        except Exception as e:  # noqa
            log.exception(
                'An unhandled exception was raised during build setup',
                extra={'tags': {'build': build_pk}}
            )
            # ``setup_env`` is only created inside ``run_setup``; if the
            # failure happened before that (e.g. the project API lookup
            # raised) there is no environment to record the error on, and
            # touching it would raise AttributeError from the handler itself.
            if self.setup_env is not None:
                self.setup_env.failure = BuildEnvironmentError(
                    BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                        build_id=build_pk,
                    )
                )
                self.setup_env.update_build(BUILD_STATE_FINISHED)
            return False
        else:
            # No exceptions in the setup step, catch unhandled errors in the
            # build steps
            try:
                self.run_build(docker=docker, record=record)
            except Exception as e:  # noqa
                log.exception(
                    'An unhandled exception was raised during project build',
                    extra={'tags': {'build': build_pk}}
                )
                # Same defensive check as above: ``run_build`` may fail
                # before ``build_env`` is assigned (it defaults to ``None``
                # in ``__init__``).
                if self.build_env is not None:
                    self.build_env.failure = BuildEnvironmentError(
                        BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                            build_id=build_pk,
                        )
                    )
                    self.build_env.update_build(BUILD_STATE_FINISHED)
                return False

        return True

    def run_setup(self, record=True):
        """
        Run setup in the local environment.

        Return True if successful.
        """
        self.setup_env = LocalBuildEnvironment(
            project=self.project,
            version=self.version,
            build=self.build,
            record=record,
            update_on_success=False,
        )

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise BuildEnvironmentError(
                    _('Builds for this project are temporarily disabled'))
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # Re-queue the task and surface a user-visible 423 error for
                # this attempt; the retry will run once the lock is released.
                self.task.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423
                )

            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise BuildEnvironmentError(
                    'Problem parsing YAML configuration. {0}'.format(str(e))
                )

        if self.setup_env.failure or self.config is None:
            self._log('Failing build because of setup failure: %s' %
                      self.setup_env.failure)

            # Send notification to users only if the build didn't fail because of
            # LockTimeout: this exception occurs when a build is triggered before the previous
            # one has finished (e.g. two webhooks, one after the other)
            if not isinstance(self.setup_env.failure,
                              vcs_support_utils.LockTimeout):
                self.send_notifications()

            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def run_build(self, docker, record):
        """
        Build the docs in an environment.

        :param docker: if ``True``, the build uses a ``DockerBuildEnvironment``,
            otherwise it uses a ``LocalBuildEnvironment`` to run all the
            commands to build the docs
        :param record: whether or not record all the commands in the ``Build``
            instance
        """
        env_vars = self.get_env_vars()

        if docker:
            env_cls = DockerBuildEnvironment
        else:
            env_cls = LocalBuildEnvironment
        self.build_env = env_cls(project=self.project, version=self.version,
                                 config=self.config, build=self.build,
                                 record=record, environment=env_vars)

        # Environment used for building code, usually with Docker
        with self.build_env:

            if self.project.documentation_type == 'auto':
                self.update_documentation_type()

            python_env_cls = Virtualenv
            if self.config.use_conda:
                self._log('Using conda')
                python_env_cls = Conda
            self.python_env = python_env_cls(version=self.version,
                                             build_env=self.build_env,
                                             config=self.config)

            try:
                self.setup_python_environment()

                # TODO the build object should have an idea of these states, extend
                # the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except vcs_support_utils.LockTimeout as e:
                self.task.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423
                )
            except SoftTimeLimitExceeded:
                raise BuildEnvironmentError(_('Build exited due to time out'))

            # Finalize build and update web servers
            if build_id:
                self.update_app_instances(
                    html=bool(outcomes['html']),
                    search=bool(outcomes['search']),
                    localmedia=bool(outcomes['localmedia']),
                    pdf=bool(outcomes['pdf']),
                    epub=bool(outcomes['epub']),
                )
            else:
                log.warning('No build ID, not syncing files')

        if self.build_env.failed:
            self.send_notifications()

        build_complete.send(sender=Build, build=self.build_env.build)

    @staticmethod
    def get_project(project_pk):
        """Get project from API."""
        project_data = api_v2.project(project_pk).get()
        return APIProject(**project_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API.

        :param build_pk: Build primary key
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Strip keys that are not needed (and not JSON-serializable back to
        # the API) before storing the build payload locally.
        return {
            key: val
            for key, val in build.items()
            if key not in ['project', 'version', 'resource_uri', 'absolute_uri']
        }

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)

        self._log(msg='Updating docs from VCS')
        self.sync_repo()
        commit = self.project.vcs_repo(self.version.slug).commit
        if commit:
            self.build['commit'] = commit

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug
        }

        if self.config.use_conda:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(self.project.doc_path, 'conda',
                                         self.version.slug, 'bin')
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(self.project.doc_path, 'envs',
                                         self.version.slug, 'bin')
            })

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        # Keep the local copies in sync with what was persisted via the API.
        self.project.has_valid_clone = True
        self.version.project.has_valid_clone = True

    def update_documentation_type(self):
        """
        Force Sphinx for 'auto' documentation type.

        This used to determine the type and automatically set the
        documentation type to Sphinx for rST and Mkdocs for markdown. It now
        just forces Sphinx, due to markdown support.
        """
        ret = 'sphinx'
        project_data = api_v2.project(self.project.pk).get()
        project_data['documentation_type'] = ret
        api_v2.project(self.project.pk).put(project_data)
        self.project.documentation_type = ret
        self.version.project.documentation_type = ret

    def update_app_instances(self, html=False, localmedia=False, search=False,
                             pdf=False, epub=False):
        """
        Update application instances with build artifacts.

        This triggers updates across application instances for html, pdf, epub,
        downloads, and search. Tasks are broadcast to all web servers from here.
        """
        # Update version if we have successfully built HTML output
        try:
            if html:
                version = api_v2.version(self.version.pk)
                version.patch({
                    'active': True,
                    'built': True,
                })
        except HttpClientError:
            log.exception(
                'Updating version failed, skipping file sync: version=%s',
                self.version,
            )

        # Broadcast finalization steps to web application instances
        broadcast(
            type='app',
            task=sync_files,
            args=[
                self.project.pk,
                self.version.pk,
            ],
            kwargs=dict(
                hostname=socket.gethostname(),
                html=html,
                localmedia=localmedia,
                search=search,
                pdf=pdf,
                epub=epub,
            ),
            callback=sync_callback.s(version_pk=self.version.pk,
                                     commit=self.build['commit']),
        )

    def setup_python_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            # Check if the python version/build image in the current venv is
            # the same to be used in this build and if it differs, wipe the
            # venv to avoid conflicts.
            if self.python_env.is_obsolete:
                self.python_env.delete_existing_venv_dir()
            else:
                self.python_env.delete_existing_build_dir()

            self.python_env.setup_base()
            self.python_env.save_environment_json()
            self.python_env.install_core_requirements()
            self.python_env.install_user_requirements()
            self.python_env.install_package()

    def build_docs(self):
        """
        Wrapper to all build functions.

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia, pdf,
                  and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        # Any outcome not explicitly set below reads as a failure (False).
        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)

        return outcomes

    def build_docs_html(self):
        """Build HTML docs."""
        html_builder = get_builder_class(self.project.documentation_type)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(type='app', task=move_files,
                      args=[self.version.pk, socket.gethostname()],
                      kwargs=dict(html=True)
                      )
        except socket.error:
            log.exception('move_files task has failed on socket error.')

        return success

    def build_docs_search(self):
        """
        Build search data with separate build.

        Unless the project has the feature to allow building the JSON search
        artifacts in the html build step.
        """
        build_json_in_html_builder = self.project.has_feature(
            Feature.BUILD_JSON_ARTIFACTS_WITH_HTML
        )
        if self.build_search and build_json_in_html_builder:
            # Already built in the html step
            return True
        if self.build_search and self.project.is_type_sphinx:
            return self.build_docs_class('sphinx_search')
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build."""
        if 'htmlzip' not in self.config.formats:
            return False

        if self.build_localmedia:
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs."""
        if ('pdf' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_pdf')

    def build_docs_epub(self):
        """Build ePub docs."""
        if ('epub' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_epub')

    def build_docs_class(self, builder_class):
        """
        Build docs with additional doc backends.

        These steps are not necessarily required for the build to halt, so we
        only raise a warning exception here. A hard error will halt the build
        process.
        """
        builder = get_builder_class(builder_class)(self.build_env,
                                                   python_env=self.python_env)
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure."""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])
class UpdateDocsTaskStep(SyncRepositoryMixin):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported, we
    created it or a webhook is received. Then it will sync the repository and
    build the html docs if needed.

    .. note::
        This is implemented as a separate class to isolate each run of the
        underlying task. Previously, we were using a custom ``celery.Task``
        for this, but this class is only instantiated once -- on startup. The
        effect was that this instance shared state between workers.
    """

    def __init__(
            self,
            build_env=None,
            python_env=None,
            config=None,
            force=False,
            build=None,
            project=None,
            version=None,
            task=None,
    ):
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        # ``build``/``version``/``project`` default to empty dict placeholders
        # until ``run()`` fills them in from the API.
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        if config is not None:
            self.config = config
        self.task = task
        # Always defined so error handlers in ``run`` can test it safely.
        self.setup_env = None

    # pylint: disable=arguments-differ
    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=None, force=False, **__):
        """
        Run a documentation sync n' build.

        This is fully wrapped in exception handling to account for a number
        of failure cases. We first run a few commands in a local build
        environment, but do not report on environment success. This avoids a
        flicker on the build output page where the build is marked as
        finished in between the local environment steps and the docker build
        steps.

        If a failure is raised, or the build is not successful, return
        ``False``, otherwise, ``True``.

        Unhandled exceptions raise a generic user facing error, which directs
        the user to bug us. It is therefore a benefit to have as few
        unhandled errors as possible.

        :param pk int: Project id
        :param version_pk int: Project Version id (latest if None)
        :param build_pk int: Build id (if None, commands are not recorded)
        :param record bool: record a build object in the database
        :param docker bool: use docker to build the project (if ``None``,
            ``settings.DOCKER_ENABLE`` is used)
        :param force bool: force Sphinx build
        :returns: whether build was successful or not
        :rtype: bool
        """
        try:
            if docker is None:
                docker = settings.DOCKER_ENABLE
            self.project = self.get_project(pk)
            self.version = self.get_version(self.project, version_pk)
            self.build = self.get_build(build_pk)
            self.build_force = force
            self.config = None

            setup_successful = self.run_setup(record=record)
            if not setup_successful:
                return False

        # Catch unhandled errors in the setup step
        except Exception as e:  # noqa
            log.exception(
                'An unhandled exception was raised during build setup',
                extra={
                    'stack': True,
                    'tags': {
                        'build': build_pk,
                        # ``self.project``/``self.version`` are still the
                        # empty dict placeholders from ``__init__`` if the
                        # API lookups above are what raised; fall back to
                        # ``None`` instead of raising AttributeError from
                        # inside the error handler and masking the original
                        # exception.
                        'project': self.project.slug if self.project else None,
                        'version': self.version.slug if self.version else None,
                    },
                },
            )
            if self.setup_env is not None:
                self.setup_env.failure = BuildEnvironmentError(
                    BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                        build_id=build_pk,
                    ),
                )
                self.setup_env.update_build(BUILD_STATE_FINISHED)

            # Send notifications for unhandled errors -- only when the
            # version and build data needed by ``send_notifications`` were
            # actually fetched before the failure.
            if self.version and self.build.get('id'):
                self.send_notifications()
            return False
        else:
            # No exceptions in the setup step, catch unhandled errors in the
            # build steps
            try:
                self.run_build(docker=docker, record=record)
            except Exception as e:  # noqa
                log.exception(
                    'An unhandled exception was raised during project build',
                    extra={
                        'stack': True,
                        'tags': {
                            'build': build_pk,
                            'project': self.project.slug,
                            'version': self.version.slug,
                        },
                    },
                )
                if self.build_env is not None:
                    self.build_env.failure = BuildEnvironmentError(
                        BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                            build_id=build_pk,
                        ),
                    )
                    self.build_env.update_build(BUILD_STATE_FINISHED)

                # Send notifications for unhandled errors
                self.send_notifications()
                return False

        return True

    def run_setup(self, record=True):
        """
        Run setup in the local environment.

        Return True if successful.
        """
        self.setup_env = LocalBuildEnvironment(
            project=self.project,
            version=self.version,
            build=self.build,
            record=record,
            update_on_success=False,
        )

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise ProjectBuildsSkippedError
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # Re-queue the task for a later attempt and fail this one
                # with a dedicated "version locked" error.
                self.task.retry(exc=e, throw=False)
                raise VersionLockedError
            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise YAMLParseError(
                    YAMLParseError.GENERIC_WITH_PARSE_EXCEPTION.format(
                        exception=str(e),
                    ),
                )

            self.save_build_config()
            self.additional_vcs_operations()

        if self.setup_env.failure or self.config is None:
            msg = 'Failing build because of setup failure: {}'.format(
                self.setup_env.failure,
            )
            log.info(
                LOG_TEMPLATE.format(
                    project=self.project.slug,
                    version=self.version.slug,
                    msg=msg,
                ),
            )

            # Send notification to users only if the build didn't fail because
            # of VersionLockedError: this exception occurs when a build is
            # triggered before the previous one has finished (e.g. two webhooks,
            # one after the other)
            if not isinstance(self.setup_env.failure, VersionLockedError):
                self.send_notifications()

            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def additional_vcs_operations(self):
        """
        Execution of tasks that involve the project's VCS.

        All this tasks have access to the configuration object.
        """
        version_repo = self.get_vcs_repo()
        if version_repo.supports_submodules:
            version_repo.update_submodules(self.config)

    def run_build(self, docker, record):
        """
        Build the docs in an environment.

        :param docker: if ``True``, the build uses a
            ``DockerBuildEnvironment``, otherwise it uses a
            ``LocalBuildEnvironment`` to run all the commands to build the
            docs
        :param record: whether or not record all the commands in the
            ``Build`` instance
        """
        env_vars = self.get_env_vars()

        if docker:
            env_cls = DockerBuildEnvironment
        else:
            env_cls = LocalBuildEnvironment
        self.build_env = env_cls(
            project=self.project,
            version=self.version,
            config=self.config,
            build=self.build,
            record=record,
            environment=env_vars,
        )

        # Environment used for building code, usually with Docker
        with self.build_env:
            python_env_cls = Virtualenv
            if self.config.conda is not None:
                log.info(
                    LOG_TEMPLATE.format(
                        project=self.project.slug,
                        version=self.version.slug,
                        msg='Using conda',
                    ),
                )
                python_env_cls = Conda
            self.python_env = python_env_cls(
                version=self.version,
                build_env=self.build_env,
                config=self.config,
            )

            try:
                self.setup_python_environment()

                # TODO the build object should have an idea of these states,
                # extend the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except vcs_support_utils.LockTimeout as e:
                self.task.retry(exc=e, throw=False)
                raise VersionLockedError
            except SoftTimeLimitExceeded:
                raise BuildTimeoutError

            # Finalize build and update web servers
            if build_id:
                self.update_app_instances(
                    html=bool(outcomes['html']),
                    search=bool(outcomes['search']),
                    localmedia=bool(outcomes['localmedia']),
                    pdf=bool(outcomes['pdf']),
                    epub=bool(outcomes['epub']),
                )
            else:
                log.warning('No build ID, not syncing files')

        if self.build_env.failed:
            self.send_notifications()

        build_complete.send(sender=Build, build=self.build_env.build)

    @staticmethod
    def get_project(project_pk):
        """Get project from API."""
        project_data = api_v2.project(project_pk).get()
        return APIProject(**project_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API.

        :param build_pk: Build primary key
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Strip keys that should not be stored/echoed back locally.
        private_keys = [
            'project',
            'version',
            'resource_uri',
            'absolute_uri',
        ]
        return {
            key: val
            for key, val in build.items()
            if key not in private_keys
        }

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)
        log.info(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg='Updating docs from VCS',
            ),
        )
        try:
            self.sync_repo()
        except RepositoryError:
            # Do not log as ERROR handled exceptions
            log.warning('There was an error with the repository', exc_info=True)
        except vcs_support_utils.LockTimeout:
            log.info(
                'Lock still active: project=%s version=%s',
                self.project.slug,
                self.version.slug,
            )
        except Exception:
            # Catch unhandled errors when syncing
            log.exception(
                'An unhandled exception was raised during VCS syncing',
                extra={
                    'stack': True,
                    'tags': {
                        'build': self.build['id'],
                        'project': self.project.slug,
                        'version': self.version.slug,
                    },
                },
            )
        commit = self.project.vcs_repo(self.version.slug).commit
        if commit:
            self.build['commit'] = commit

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug,
        }

        if self.config.conda is not None:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(
                    self.project.doc_path,
                    'conda',
                    self.version.slug,
                    'bin',
                ),
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(
                    self.project.doc_path,
                    'envs',
                    self.version.slug,
                    'bin',
                ),
            })

        # Update environment from Project's specific environment variables
        env.update(self.project.environment_variables)

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        # Keep the local copies in sync with what was persisted via the API.
        self.project.has_valid_clone = True
        self.version.project.has_valid_clone = True

    def save_build_config(self):
        """Save config in the build object."""
        pk = self.build['id']
        config = self.config.as_dict()
        api_v2.build(pk).patch({
            'config': config,
        })
        self.build['config'] = config

    def update_app_instances(
            self,
            html=False,
            localmedia=False,
            search=False,
            pdf=False,
            epub=False,
    ):
        """
        Update application instances with build artifacts.

        This triggers updates across application instances for html, pdf,
        epub, downloads, and search. Tasks are broadcast to all web servers
        from here.
        """
        # Update version if we have successfully built HTML output
        try:
            if html:
                version = api_v2.version(self.version.pk)
                version.patch({
                    'built': True,
                })
        except HttpClientError:
            log.exception(
                'Updating version failed, skipping file sync: version=%s',
                self.version,
            )

        # Broadcast finalization steps to web application instances
        broadcast(
            type='app',
            task=sync_files,
            args=[
                self.project.pk,
                self.version.pk,
                self.config.doctype,
            ],
            kwargs=dict(
                hostname=socket.gethostname(),
                html=html,
                localmedia=localmedia,
                search=search,
                pdf=pdf,
                epub=epub,
            ),
            callback=sync_callback.s(
                version_pk=self.version.pk,
                commit=self.build['commit'],
                search=search,
            ),
        )

    def setup_python_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        with self.project.repo_nonblockinglock(version=self.version):
            # Check if the python version/build image in the current venv is
            # the same to be used in this build and if it differs, wipe the
            # venv to avoid conflicts.
            if self.python_env.is_obsolete:
                self.python_env.delete_existing_venv_dir()
            else:
                self.python_env.delete_existing_build_dir()

            self.python_env.setup_base()
            self.python_env.save_environment_json()
            self.python_env.install_core_requirements()
            self.python_env.install_requirements()

    def build_docs(self):
        """
        Wrapper to all build functions.

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia, pdf,
                  and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        # Any outcome not explicitly set below reads as a failure (False).
        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(version=self.version):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)

        return outcomes

    def build_docs_html(self):
        """Build HTML docs."""
        html_builder = get_builder_class(self.config.doctype)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(
                type='app',
                task=move_files,
                args=[
                    self.version.pk,
                    socket.gethostname(),
                    self.config.doctype
                ],
                kwargs=dict(html=True),
            )
        except socket.error:
            log.exception('move_files task has failed on socket error.')

        return success

    def build_docs_search(self):
        """Build search data."""
        # Search is always run in sphinx using the rtd-sphinx-extension.
        # Mkdocs has no search currently.
        if self.is_type_sphinx():
            return True
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build."""
        if 'htmlzip' not in self.config.formats:
            return False
        # We don't generate a zip for mkdocs currently.
        if self.is_type_sphinx():
            return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs."""
        if 'pdf' not in self.config.formats:
            return False
        # Mkdocs has no pdf generation currently.
        if self.is_type_sphinx():
            return self.build_docs_class('sphinx_pdf')
        return False

    def build_docs_epub(self):
        """Build ePub docs."""
        if 'epub' not in self.config.formats:
            return False
        # Mkdocs has no epub generation currently.
        if self.is_type_sphinx():
            return self.build_docs_class('sphinx_epub')
        return False

    def build_docs_class(self, builder_class):
        """
        Build docs with additional doc backends.

        These steps are not necessarily required for the build to halt, so we
        only raise a warning exception here. A hard error will halt the build
        process.
        """
        builder = get_builder_class(builder_class)(
            self.build_env,
            python_env=self.python_env,
        )
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure."""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])

    def is_type_sphinx(self):
        """Is documentation type Sphinx."""
        return 'sphinx' in self.config.doctype