def test_incremental_state_update_with_no_update(self):
    """Build updates to a non-finished state regardless of ``update_on_success``.

    NOTE(review): the previous docstring said "when update_on_success=True",
    but the second environment below explicitly passes
    ``update_on_success=False``; both environments are expected to push the
    intermediate state to the API, so the flag only affects the *final*
    success update.
    """
    build_envs = [
        # Default behavior (final success update enabled).
        LocalEnvironment(
            version=self.version,
            project=self.project,
            build={'id': DUMMY_BUILD_ID},
        ),
        # Final success update disabled; intermediate state updates such
        # as "cloning" should still be sent.
        LocalEnvironment(
            version=self.version,
            project=self.project,
            build={'id': DUMMY_BUILD_ID},
            update_on_success=False,
        ),
    ]

    for build_env in build_envs:
        with build_env:
            build_env.update_build(BUILD_STATE_CLONING)
            # Every intermediate update PUTs the full build payload to the
            # v2 API with the non-finished state.
            self.mocks.mocks['api_v2.build']().put.assert_called_with({
                'id': DUMMY_BUILD_ID,
                'version': self.version.pk,
                'success': True,
                'project': self.project.pk,
                'setup_error': u'',
                'length': mock.ANY,
                'error': '',
                'setup': u'',
                'output': u'',
                'state': BUILD_STATE_CLONING,
                'builder': mock.ANY,
                'exit_code': 0,
            })
def __init__(self, project):
    """Record the filesystem roots for *project* and verify the layout."""
    self.project = project
    root = os.path.join(self.WEB_ROOT, project.slug)
    self.project_root = root
    self.subproject_root = os.path.join(root, 'projects')
    self.environment = LocalEnvironment(project)
    self.sanity_check()
def test_normal_execution(self):
    """Normal build in passing state"""
    expected_output = "This is okay"
    self.mocks.configure_mock(
        "process",
        {"communicate.return_value": (expected_output, "")},
    )
    type(self.mocks.process).returncode = PropertyMock(return_value=0)

    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={},
    )
    with build_env:
        build_env.run("echo", "test")

    # The subprocess ran once, the environment finished cleanly, and the
    # command output was captured.
    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.successful)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, expected_output)
def test_failing_execution(self):
    """Build in failing state"""
    expected_output = "This is not okay"
    self.mocks.configure_mock(
        "process",
        {"communicate.return_value": (expected_output, "")},
    )
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={},
    )
    with build_env:
        build_env.run("echo", "test")
        # run() is expected to abort the block on the non-zero exit code,
        # so this line should never execute.
        self.fail("This should be unreachable")

    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, expected_output)
def run_setup(self, record=True):
    """
    Run setup in the local environment.

    Return True if successful.
    """
    # ``update_on_success=False``: the setup environment must not push a
    # final "success" update -- the build continues into the build phase.
    self.setup_env = LocalEnvironment(
        project=self.project,
        version=self.version,
        build=self.build,
        record=record,
        update_on_success=False,
    )

    # Environment used for code checkout & initial configuration reading
    with self.setup_env:
        if self.project.skip:
            raise BuildEnvironmentError(
                _('Builds for this project are temporarily disabled'))
        try:
            self.setup_vcs()
        except vcs_support_utils.LockTimeout as e:
            # Re-queue the task without raising, then fail this attempt
            # with HTTP 423 (Locked) so the retry picks it up later.
            self.retry(exc=e, throw=False)
            raise BuildEnvironmentError(
                'Version locked, retrying in 5 minutes.',
                status_code=423
            )

        try:
            self.config = load_yaml_config(version=self.version)
        except ConfigError as e:
            raise BuildEnvironmentError(
                'Problem parsing YAML configuration. {0}'.format(str(e))
            )

    if self.setup_env.failure or self.config is None:
        self._log('Failing build because of setup failure: %s' %
                  self.setup_env.failure)

        # Send notification to users only if the build didn't fail because of
        # LockTimeout: this exception occurs when a build is triggered before the previous
        # one has finished (e.g. two webhooks, one after the other)
        if not isinstance(self.setup_env.failure,
                          vcs_support_utils.LockTimeout):
            self.send_notifications()

        return False

    if self.setup_env.successful and not self.project.has_valid_clone:
        self.set_valid_clone()

    return True
def test_failing_execution(self):
    """Build in failing state."""
    self.mocks.configure_mock(
        'process',
        {'communicate.return_value': (b'This is not okay', '')})
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )
    with build_env:
        build_env.run('echo', 'test')
        # run() is expected to abort the block on the non-zero exit code,
        # so this line should never execute.
        self.fail('This should be unreachable')

    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, u'This is not okay')

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # The failed build is reported to the v2 API with the command's
    # exit code and a finished state.
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': '',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def test_normal_execution(self):
    '''Normal build in passing state'''
    self.mocks.configure_mock(
        'process',
        {'communicate.return_value': ('This is okay', '')},
    )
    type(self.mocks.process).returncode = PropertyMock(return_value=0)

    env = LocalEnvironment(version=self.version, project=self.project,
                           build={})
    with env:
        env.run('echo', 'test')

    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(env.done)
    self.assertTrue(env.successful)
    self.assertEqual(len(env.commands), 1)
    self.assertEqual(env.commands[0].output, u'This is okay')
def test_failing_execution_with_unexpected_exception(self):
    """Build in failing state with exception from code."""
    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )
    with build_env:
        # An exception type the environment does not special-case: the
        # context manager should swallow it and report a generic error.
        raise ValueError('uncaught')

    # No command ever ran; the failure came from the context body itself.
    self.assertFalse(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # NOTE(review): unlike the sibling failure tests, no 'exit_code' key is
    # expected in this payload -- presumably because no command executed;
    # confirm against the environment implementation.
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': (
            'There was a problem with Read the Docs while building your '
            'documentation. Please report this to us with your build id (123).'
        ),
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
    })
def test_failing_execution_with_caught_exception(self):
    """Build in failing state with BuildEnvironmentError exception."""
    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )
    with build_env:
        raise BuildEnvironmentError('Foobar')

    # No command ever ran; the failure came from the context body itself.
    self.assertFalse(self.mocks.process.communicate.called)
    self.assertEqual(len(build_env.commands), 0)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # BuildEnvironmentError messages are surfaced verbatim as the build
    # error in the v2 API payload.
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': 'Foobar',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def test_build_respects_pdf_flag(self):
    '''Build output format control'''
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=True,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]

    build_env = LocalEnvironment(project=project, version=version, build={})
    task = UpdateDocsTask(build_env=build_env, version=version,
                          project=project, search=False, localmedia=False)
    task.build_docs()

    # Fixed: the old comments were swapped -- this test enables PDF and
    # disables Epub, so HTML and PDF are built.
    self.mocks.html_build.assert_called_once_with()
    self.mocks.pdf_build.assert_called_once_with()
    # Epub however was disabled and therefore not built.
    self.assertFalse(self.mocks.epub_build.called)
def test_failing_execution(self):
    '''Build in failing state'''
    self.mocks.configure_mock(
        'process',
        {'communicate.return_value': ('This is not okay', '')},
    )
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    env = LocalEnvironment(version=self.version, project=self.project,
                           build={})
    with env:
        env.run('echo', 'test')
        # run() aborts the block on failure, so this is never reached.
        self.fail('This should be unreachable')

    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(env.done)
    self.assertTrue(env.failed)
    self.assertEqual(len(env.commands), 1)
    self.assertEqual(env.commands[0].output, u'This is not okay')
def test_build_respects_epub_flag(self):
    '''Test build with epub enabled'''
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=False,
                  enable_epub_build=True,
                  versions=[fixture()])
    version = project.versions.all()[0]

    build_env = LocalEnvironment(project=project, version=version, build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    yaml_config = get_build_config({})
    config = ConfigWrapper(version=version, yaml_config=yaml_config)

    task = UpdateDocsTask(build_env=build_env, project=project,
                          python_env=python_env, version=version,
                          search=False, localmedia=False, config=config)
    task.build_docs()

    # The HTML and the Epub format were built.
    self.mocks.html_build.assert_called_once_with()
    self.mocks.epub_build.assert_called_once_with()
    # PDF however was disabled and therefore not built.
    self.assertFalse(self.mocks.pdf_build.called)
def test_build(self):
    '''Test full build'''
    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  versions=[fixture()])
    version = project.versions.all()[0]
    self.mocks.configure_mock('api_versions', {'return_value': [version]})
    self.mocks.configure_mock(
        'api',
        {'get.return_value': {'downloads': "no_url_here"}})
    # Stop the html_build patch so the real HTML builder runs and actually
    # issues its command through the (mocked) Popen.
    self.mocks.patches['html_build'].stop()

    build_env = LocalEnvironment(project=project, version=version, build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    yaml_config = get_build_config({})
    config = ConfigWrapper(version=version, yaml_config=yaml_config)

    task = UpdateDocsTask(build_env=build_env, project=project,
                          python_env=python_env, version=version,
                          search=False, localmedia=False, config=config)
    task.build_docs()

    # Get command and check first part of command list is a call to sphinx
    self.assertEqual(self.mocks.popen.call_count, 1)
    cmd = self.mocks.popen.call_args_list[0][0]
    self.assertRegexpMatches(cmd[0][0], r'python')
    self.assertRegexpMatches(cmd[0][1], r'sphinx-build')
def __init__(self, project, version_slug, environment=None, **kwargs):
    """
    Initialize repository metadata and the command environment.

    :param project: project object; must provide ``default_branch``,
        ``name``, ``clean_repo`` and ``checkout_path()``
    :param version_slug: slug of the version whose checkout path is used
        as the working directory
    :param environment: optional pre-built environment to run VCS commands
        in; when omitted a ``LocalEnvironment`` for the project is created
    """
    self.default_branch = project.default_branch
    self.name = project.name
    self.repo_url = project.clean_repo
    self.working_dir = project.checkout_path(version_slug)

    # NOTE(review): deferred import -- presumably to avoid a circular
    # dependency with the environments module; confirm against layout.
    from readthedocs.doc_builder.environments import LocalEnvironment
    self.environment = environment or LocalEnvironment(project)

    # Update the env variables with the proper VCS env variables
    self.environment.environment.update(self.env)
def test_failing_execution_with_caught_exception(self):
    '''Build in failing state with BuildEnvironmentError exception'''
    env = LocalEnvironment(version=self.version, project=self.project,
                           build={})

    with env:
        # The environment absorbs BuildEnvironmentError and marks the
        # build failed instead of propagating the exception.
        raise BuildEnvironmentError('Foobar')

    self.assertFalse(self.mocks.process.communicate.called)
    self.assertEqual(len(env.commands), 0)
    self.assertTrue(env.done)
    self.assertTrue(env.failed)
def test_builder_comments(self):
    '''Normal build with comments'''
    project = get(
        Project,
        documentation_type='sphinx',
        allow_comments=True,
        versions=[fixture()],
    )
    version = project.versions.all()[0]
    env = LocalEnvironment(version=version, project=project, build={})
    python_env = Virtualenv(version=version, build_env=env)

    # With comments allowed the sphinx builder variant changes.
    builder = get_builder_class(project.documentation_type)(env, python_env)
    self.assertEqual(builder.sphinx_builder, 'readthedocs-comments')
def test_failing_execution_with_unexpected_exception(self):
    '''Build in failing state with exception from code'''
    env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )
    with env:
        raise ValueError('uncaught')

    # Nothing executed and the environment ended in a failed state.
    self.assertFalse(self.mocks.process.communicate.called)
    self.assertTrue(env.done)
    self.assertTrue(env.failed)
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
def test_failing_execution_with_uncaught_exception(self):
    '''Build in failing state with exception from code'''
    env = LocalEnvironment(version=self.version, project=self.project,
                           build={})

    # A plain Exception is expected to propagate out of the environment.
    with self.assertRaises(Exception):
        with env:
            raise Exception()

    self.assertFalse(self.mocks.process.communicate.called)
    self.assertTrue(env.done)
    self.assertTrue(env.failed)
def test_builder_no_comments(self):
    '''Test builder without comments'''
    project = get(
        Project,
        documentation_type='sphinx',
        allow_comments=False,
        versions=[fixture()],
    )
    version = project.versions.all()[0]
    env = LocalEnvironment(version=version, project=project, build={})

    # Without comments the plain readthedocs sphinx builder is selected.
    builder = get_builder_class(project.documentation_type)(env)
    self.assertEqual(builder.sphinx_builder, 'readthedocs')
def test_failing_execution(self):
    """Build in failing state."""
    self.mocks.configure_mock('process', {
        'communicate.return_value': (b'This is not okay', '')
    })
    type(self.mocks.process).returncode = PropertyMock(return_value=1)

    build_env = LocalEnvironment(
        version=self.version,
        project=self.project,
        build={'id': DUMMY_BUILD_ID},
    )
    with build_env:
        build_env.run('echo', 'test')
        # run() is expected to abort the block on the non-zero exit code,
        # so this line should never execute.
        self.fail('This should be unreachable')

    self.assertTrue(self.mocks.process.communicate.called)
    self.assertTrue(build_env.done)
    self.assertTrue(build_env.failed)
    self.assertEqual(len(build_env.commands), 1)
    self.assertEqual(build_env.commands[0].output, u'This is not okay')

    # api() is not called anymore, we use api_v2 instead
    self.assertFalse(self.mocks.api()(DUMMY_BUILD_ID).put.called)
    # The failed build is reported to the v2 API with the command's
    # exit code and a finished state.
    self.mocks.mocks['api_v2.build']().put.assert_called_with({
        'id': DUMMY_BUILD_ID,
        'version': self.version.pk,
        'success': False,
        'project': self.project.pk,
        'setup_error': u'',
        'length': mock.ANY,
        'error': '',
        'setup': u'',
        'output': u'',
        'state': u'finished',
        'builder': mock.ANY,
        'exit_code': 1,
    })
def test_build_pdf_latex_not_failure(self):
    '''Test pass during PDF builds and bad latex failure status code'''
    self.mocks.patches['html_build'].stop()
    self.mocks.patches['pdf_build'].stop()

    project = get(Project,
                  slug='project-2',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=True,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]
    assert project.conf_dir() == '/tmp/rtd'

    build_env = LocalEnvironment(project=project, version=version, build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    yaml_config = get_build_config({})
    config = ConfigWrapper(version=version, yaml_config=yaml_config)

    task = UpdateDocsTask(build_env=build_env, project=project,
                          python_env=python_env, version=version,
                          search=False, localmedia=False, config=config)

    # Mock out the separate calls to Popen using an iterable side_effect
    # NOTE(review): latex exits 1 here but its output reports
    # "Output written on foo.pdf" -- the build is expected to treat that
    # as a pass (see the final assertion).
    returns = [
        (('', ''), 0),  # sphinx-build html
        (('', ''), 0),  # sphinx-build pdf
        (('Output written on foo.pdf', ''), 1),  # latex
        (('', ''), 0),  # makeindex
        (('', ''), 0),  # latex
    ]
    mock_obj = mock.Mock()
    mock_obj.communicate.side_effect = [
        output for (output, status) in returns
    ]
    type(mock_obj).returncode = mock.PropertyMock(
        side_effect=[status for (output, status) in returns])
    self.mocks.popen.return_value = mock_obj

    with build_env:
        task.build_docs()

    self.assertEqual(self.mocks.popen.call_count, 5)
    self.assertTrue(build_env.successful)
def test_build_pdf_latex_failures(self):
    '''Build failure if latex fails'''
    if six.PY3:
        import pytest
        pytest.xfail(
            "test_build_pdf_latex_failures is known to fail on 3.6")
    self.mocks.patches['html_build'].stop()
    self.mocks.patches['pdf_build'].stop()

    project = get(Project,
                  slug='project-1',
                  documentation_type='sphinx',
                  conf_py_file='test_conf.py',
                  enable_pdf_build=True,
                  enable_epub_build=False,
                  versions=[fixture()])
    version = project.versions.all()[0]
    assert project.conf_dir() == '/tmp/rtd'

    build_env = LocalEnvironment(project=project, version=version, build={})
    python_env = Virtualenv(version=version, build_env=build_env)
    config = ConfigWrapper(version=version, yaml_config=create_load()()[0])

    task = UpdateDocsTask(build_env=build_env, project=project,
                          python_env=python_env, version=version,
                          search=False, localmedia=False, config=config)

    # Mock out the separate calls to Popen using an iterable side_effect
    returns = [
        (('', ''), 0),  # sphinx-build html
        (('', ''), 0),  # sphinx-build pdf
        (('', ''), 1),  # latex (empty output + exit 1 -> real failure)
        (('', ''), 0),  # makeindex
        (('', ''), 0),  # latex
    ]
    mock_obj = mock.Mock()
    mock_obj.communicate.side_effect = [
        output for (output, status) in returns]
    type(mock_obj).returncode = mock.PropertyMock(
        side_effect=[status for (output, status) in returns])
    self.mocks.popen.return_value = mock_obj

    with build_env:
        task.build_docs()

    # NOTE(review): 5 mocked return values but 7 expected Popen calls --
    # presumably the retry path re-runs latex; confirm against the builder.
    self.assertEqual(self.mocks.popen.call_count, 7)
    self.assertTrue(build_env.failed)
class UpdateDocsTask(Task):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """

    # Celery task configuration.
    max_retries = 5
    default_retry_delay = (7 * 60)
    name = 'update_docs'

    def __init__(self, build_env=None, python_env=None, config=None,
                 force=False, search=True, localmedia=True,
                 build=None, project=None, version=None):
        # Constructor arguments are only used by tests; ``run()`` overwrites
        # these attributes when the task executes for real.
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        self.build_search = search
        self.build_localmedia = localmedia
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        if config is not None:
            self.config = config

    def _log(self, msg):
        """Log *msg* tagged with this task's project and version slugs."""
        log.info(LOG_TEMPLATE.format(project=self.project.slug,
                                     version=self.version.slug,
                                     msg=msg))

    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=False, search=True, force=False, localmedia=True, **__):
        """Celery entry point: resolve objects from the API, then build."""
        # pylint: disable=arguments-differ
        self.project = self.get_project(pk)
        self.version = self.get_version(self.project, version_pk)
        self.build = self.get_build(build_pk)
        self.build_search = search
        self.build_localmedia = localmedia
        self.build_force = force
        self.config = None

        setup_successful = self.run_setup(record=record)
        # Only build when checkout + configuration reading succeeded.
        if setup_successful:
            self.run_build(record=record, docker=docker)

    def run_setup(self, record=True):
        """Run setup in the local environment. Return True if successful."""
        self.setup_env = LocalEnvironment(project=self.project,
                                          version=self.version,
                                          build=self.build,
                                          record=record)

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise BuildEnvironmentError(
                    _('Builds for this project are temporarily disabled'))
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # Re-queue the task without raising, then fail this attempt
                # with HTTP 423 (Locked).
                self.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423)

            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise BuildEnvironmentError(
                    'Problem parsing YAML configuration. {0}'.format(str(e)))

        if self.setup_env.failure or self.config is None:
            self._log('Failing build because of setup failure: %s' %
                      self.setup_env.failure)

            # Send notification to users only if the build didn't fail because of
            # LockTimeout: this exception occurs when a build is triggered before the previous
            # one has finished (e.g. two webhooks, one after the other)
            if not isinstance(self.setup_env.failure,
                              vcs_support_utils.LockTimeout):
                self.send_notifications()

            self.setup_env.update_build(state=BUILD_STATE_FINISHED)
            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def run_build(self, docker=False, record=True):
        """Build the docs in an environment.

        If `docker` is True, or Docker is enabled by the settings.DOCKER_ENABLE
        setting, then build in a Docker environment. Otherwise build locally.
        """
        env_vars = self.get_env_vars()
        if docker or settings.DOCKER_ENABLE:
            env_cls = DockerEnvironment
        else:
            env_cls = LocalEnvironment
        self.build_env = env_cls(project=self.project,
                                 version=self.version,
                                 build=self.build,
                                 record=record,
                                 environment=env_vars)

        # Environment used for building code, usually with Docker
        with self.build_env:
            if self.project.documentation_type == 'auto':
                self.update_documentation_type()

            python_env_cls = Virtualenv
            if self.config.use_conda:
                self._log('Using conda')
                python_env_cls = Conda
            self.python_env = python_env_cls(version=self.version,
                                             build_env=self.build_env,
                                             config=self.config)

            try:
                self.setup_environment()

                # TODO the build object should have an idea of these states, extend
                # the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except SoftTimeLimitExceeded:
                raise BuildEnvironmentError(_('Build exited due to time out'))

            # Finalize build and update web servers
            if build_id:
                self.update_app_instances(
                    html=bool(outcomes['html']),
                    search=bool(outcomes['search']),
                    localmedia=bool(outcomes['localmedia']),
                    pdf=bool(outcomes['pdf']),
                    epub=bool(outcomes['epub']),
                )

        if self.build_env.failed:
            self.send_notifications()
        build_complete.send(sender=Build, build=self.build_env.build)

        self.build_env.update_build(state=BUILD_STATE_FINISHED)

    @staticmethod
    def get_project(project_pk):
        """Get project from API"""
        project_data = api_v2.project(project_pk).get()
        project = make_api_project(project_data)
        return project

    @staticmethod
    def get_version(project, version_pk):
        """Ensure we're using a sane version"""
        if version_pk:
            version_data = api_v2.version(version_pk).get()
        else:
            # No explicit version: fall back to the project's LATEST.
            version_data = (api_v2.version(
                project.slug).get(slug=LATEST)['objects'][0])
        return make_api_version(version_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API

        :param build_pk: Build primary key
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Strip API bookkeeping keys before reusing the payload locally.
        return dict((key, val) for (key, val) in list(build.items())
                    if key not in ['project', 'version', 'resource_uri',
                                   'absolute_uri'])

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.

        :param build_env: Build environment
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)

        self._log(msg='Updating docs from VCS')
        try:
            update_imported_docs(self.version.pk)
            commit = self.project.vcs_repo(self.version.slug).commit
            if commit:
                self.build['commit'] = commit
        except ProjectImportError as e:
            log.error(
                LOG_TEMPLATE.format(project=self.project.slug,
                                    version=self.version.slug,
                                    msg=str(e)),
                exc_info=True,
            )
            raise BuildEnvironmentError('Failed to import project: %s' % e,
                                        status_code=404)

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug
        }

        # BIN_PATH points at the interpreter environment (conda env or
        # virtualenv) that builder commands should execute from.
        if self.config.use_conda:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path,
                                                'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(self.project.doc_path, 'conda',
                                         self.version.slug, 'bin')
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(self.project.doc_path, 'envs',
                                         self.version.slug, 'bin')
            })

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        self.project.has_valid_clone = True

    def update_documentation_type(self):
        """
        Force Sphinx for 'auto' documentation type

        This used to determine the type and automatically set the
        documentation type to Sphinx for rST and Mkdocs for markdown. It now
        just forces Sphinx, due to markdown support.
        """
        ret = 'sphinx'
        project_data = api_v2.project(self.project.pk).get()
        project_data['documentation_type'] = ret
        api_v2.project(self.project.pk).put(project_data)
        self.project.documentation_type = ret

    def update_app_instances(self, html=False, localmedia=False, search=False,
                             pdf=False, epub=False):
        """Update application instances with build artifacts

        This triggers updates across application instances for html, pdf, epub,
        downloads, and search. Tasks are broadcast to all web servers from here.
        """
        # Update version if we have successfully built HTML output
        try:
            if html:
                version = api_v2.version(self.version.pk)
                version.patch({
                    'active': True,
                    'built': True,
                })
        except HttpClientError as e:
            log.error(
                'Updating version failed, skipping file sync: version=%s',
                self.version.pk, exc_info=True)
        else:
            # Broadcast finalization steps to web application instances
            broadcast(type='app', task=sync_files, args=[
                self.project.pk,
                self.version.pk,
            ], kwargs=dict(
                hostname=socket.gethostname(),
                html=html,
                localmedia=localmedia,
                search=search,
                pdf=pdf,
                epub=epub,
            ))

        # Delayed tasks
        # TODO these should be chained on to the broadcast calls. The
        # broadcast calls could be lumped together into a promise, and on
        # task result, these next few tasks can be updated, also in a
        # chained fashion
        fileify.delay(self.version.pk, commit=self.build.get('commit'))
        update_search.delay(self.version.pk, commit=self.build.get('commit'))

    def setup_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.

        :param build_env: Build environment to pass commands and execution
            through.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        # Hold the repo lock while mutating the build directory.
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            self.python_env.delete_existing_build_dir()
            self.python_env.setup_base()
            self.python_env.install_core_requirements()
            self.python_env.install_user_requirements()
            self.python_env.install_package()

    def build_docs(self):
        """Wrapper to all build functions

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia, pdf,
                  and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        # Every outcome defaults to False until its builder reports success.
        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)
        return outcomes

    def build_docs_html(self):
        """Build HTML docs"""
        html_builder = get_builder_class(self.project.documentation_type)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(type='app', task=move_files,
                      args=[self.version.pk, socket.gethostname()],
                      kwargs=dict(html=True))
        except socket.error:
            # TODO do something here
            pass

        return success

    def build_docs_search(self):
        """Build search data with separate build"""
        if self.build_search:
            if self.project.is_type_mkdocs:
                return self.build_docs_class('mkdocs_json')
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_search')
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build"""
        if 'htmlzip' not in self.config.formats:
            return False

        if self.build_localmedia:
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs"""
        # PDF output is sphinx-only and can be disabled per-project.
        if ('pdf' not in self.config.formats or
                self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_pdf')

    def build_docs_epub(self):
        """Build ePub docs"""
        # ePub output is sphinx-only and can be disabled per-project.
        if ('epub' not in self.config.formats or
                self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_epub')

    def build_docs_class(self, builder_class):
        """Build docs with additional doc backends

        These steps are not necessarily required for the build to halt, so we
        only raise a warning exception here. A hard error will halt the build
        process.
        """
        builder = get_builder_class(builder_class)(
            self.build_env, python_env=self.python_env)
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure"""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])
class UpdateDocsTask(Task):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """

    max_retries = 5
    default_retry_delay = (7 * 60)
    name = 'update_docs'

    def __init__(self, build_env=None, python_env=None, config=None,
                 force=False, search=True, localmedia=True,
                 build=None, project=None, version=None):
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        self.build_search = search
        self.build_localmedia = localmedia
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        # FIX: always define ``self.config``.  Previously it was only assigned
        # when ``config is not None``, so any access before ``run()`` raised
        # AttributeError instead of seeing ``None``.
        self.config = config

    def _log(self, msg):
        """Log ``msg`` tagged with this task's project and version slugs."""
        log.info(LOG_TEMPLATE
                 .format(project=self.project.slug,
                         version=self.version.slug,
                         msg=msg))

    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=False, search=True, force=False, localmedia=True, **__):
        # pylint: disable=arguments-differ
        """
        Load the project/version/build from the API and run setup + build.

        :param pk: Project primary key
        :param version_pk: Version primary key (defaults to latest)
        :param build_pk: Build primary key
        :param record: record a build object in the database
        :param docker: use Docker to build the project
        :param search: update search indexes
        :param force: force the Sphinx build
        :param localmedia: build downloadable local media
        """
        self.project = self.get_project(pk)
        self.version = self.get_version(self.project, version_pk)
        self.build = self.get_build(build_pk)
        self.build_search = search
        self.build_localmedia = localmedia
        self.build_force = force
        self.config = None

        setup_successful = self.run_setup(record=record)
        # Only build if checkout and config parsing succeeded.
        if setup_successful:
            self.run_build(record=record, docker=docker)

    def run_setup(self, record=True):
        """
        Run setup in the local environment.

        Checks out the code and loads the YAML configuration.

        :returns: True if successful, False otherwise (after marking the
                  build finished and notifying users).
        """
        self.setup_env = LocalEnvironment(project=self.project,
                                          version=self.version,
                                          build=self.build,
                                          record=record)

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise BuildEnvironmentError(
                    _('Builds for this project are temporarily disabled'))
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # Schedule a Celery retry, then fail this attempt with a
                # 423 (Locked) so the build output explains the retry.
                self.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423
                )

            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise BuildEnvironmentError(
                    'Problem parsing YAML configuration. {0}'.format(str(e))
                )

        if self.setup_env.failure or self.config is None:
            self._log('Failing build because of setup failure: %s'
                      % self.setup_env.failure)

            # Send notification to users only if the build didn't fail because
            # of LockTimeout: this exception occurs when a build is triggered
            # before the previous one has finished (e.g. two webhooks, one
            # after the other)
            if not isinstance(self.setup_env.failure,
                              vcs_support_utils.LockTimeout):
                self.send_notifications()

            self.setup_env.update_build(state=BUILD_STATE_FINISHED)
            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def run_build(self, docker=False, record=True):
        """
        Build the docs in an environment.

        If `docker` is True, or Docker is enabled by the
        settings.DOCKER_ENABLE setting, then build in a Docker environment.
        Otherwise build locally.
        """
        env_vars = self.get_env_vars()
        if docker or settings.DOCKER_ENABLE:
            env_cls = DockerEnvironment
        else:
            env_cls = LocalEnvironment
        self.build_env = env_cls(project=self.project, version=self.version,
                                 build=self.build, record=record,
                                 environment=env_vars)

        # FIX: pre-seed these so they are always bound, even if an exception
        # interrupts the ``try`` block below before either assignment.
        outcomes = defaultdict(lambda: False)
        build_id = None

        # Environment used for building code, usually with Docker
        with self.build_env:

            if self.project.documentation_type == 'auto':
                self.update_documentation_type()

            python_env_cls = Virtualenv
            if self.config.use_conda:
                self._log('Using conda')
                python_env_cls = Conda
            self.python_env = python_env_cls(version=self.version,
                                             build_env=self.build_env,
                                             config=self.config)

            try:
                self.setup_environment()

                # TODO the build object should have an idea of these states,
                # extend the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except SoftTimeLimitExceeded:
                raise BuildEnvironmentError(_('Build exited due to time out'))

            # Web Server Tasks
            if build_id:
                finish_build.delay(
                    version_pk=self.version.pk,
                    build_pk=build_id,
                    hostname=socket.gethostname(),
                    html=outcomes['html'],
                    search=outcomes['search'],
                    localmedia=outcomes['localmedia'],
                    pdf=outcomes['pdf'],
                    epub=outcomes['epub'],
                )

        if self.build_env.failed:
            self.send_notifications()

        build_complete.send(sender=Build, build=self.build_env.build)

        self.build_env.update_build(state=BUILD_STATE_FINISHED)

    @staticmethod
    def get_project(project_pk):
        """Get project from API"""
        project_data = api_v1.project(project_pk).get()
        project = make_api_project(project_data)
        return project

    @staticmethod
    def get_version(project, version_pk):
        """Ensure we're using a sane version"""
        if version_pk:
            version_data = api_v1.version(version_pk).get()
        else:
            version_data = (api_v1
                            .version(project.slug)
                            .get(slug=LATEST)['objects'][0])
        return make_api_version(version_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API

        :param build_pk: Build primary key
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Strip fields the build task never writes back (idiom: dict
        # comprehension instead of dict() over a generator).
        return {key: val
                for key, val in build.items()
                if key not in ['project', 'version', 'resource_uri',
                               'absolute_uri']}

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.

        :param build_env: Build environment
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)

        self._log(msg='Updating docs from VCS')
        try:
            update_imported_docs(self.version.pk)
            commit = self.project.vcs_repo(self.version.slug).commit
            if commit:
                self.build['commit'] = commit
        except ProjectImportError as e:
            log.error(
                LOG_TEMPLATE.format(project=self.project.slug,
                                    version=self.version.slug,
                                    msg=str(e)),
                exc_info=True,
            )
            raise BuildEnvironmentError('Failed to import project: %s' % e,
                                        status_code=404)

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        # NOTE(review): 'READTHEDOCS' is a bool, not a string — confirm that
        # the environment consumers coerce non-string values.
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug
        }

        if self.config.use_conda:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path,
                                                'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(self.project.doc_path,
                                         'conda',
                                         self.version.slug,
                                         'bin')
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(self.project.doc_path,
                                         'envs',
                                         self.version.slug,
                                         'bin')
            })

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        self.project.has_valid_clone = True

    def update_documentation_type(self):
        """
        Force Sphinx for 'auto' documentation type

        This used to determine the type and automatically set the
        documentation type to Sphinx for rST and Mkdocs for markdown.
        It now just forces Sphinx, due to markdown support.
        """
        ret = 'sphinx'
        project_data = api_v2.project(self.project.pk).get()
        project_data['documentation_type'] = ret
        api_v2.project(self.project.pk).put(project_data)
        self.project.documentation_type = ret

    def setup_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.

        :param build_env: Build environment to pass commands and execution
                          through.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            self.python_env.delete_existing_build_dir()
            self.python_env.setup_base()
            self.python_env.install_core_requirements()
            self.python_env.install_user_requirements()
            self.python_env.install_package()

    def build_docs(self):
        """Wrapper to all build functions

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia, pdf,
                  and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        # Any format not explicitly built defaults to False.
        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)
        return outcomes

    def build_docs_html(self):
        """Build HTML docs"""
        html_builder = get_builder_class(self.project.documentation_type)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(type='app', task=move_files,
                      args=[self.version.pk, socket.gethostname()],
                      kwargs=dict(html=True))
        except socket.error:
            # TODO do something here
            pass

        return success

    def build_docs_search(self):
        """Build search data with separate build"""
        if self.build_search:
            if self.project.is_type_mkdocs:
                return self.build_docs_class('mkdocs_json')
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_search')
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build"""
        if 'htmlzip' not in self.config.formats:
            return False

        if self.build_localmedia:
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs"""
        if ('pdf' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_pdf')

    def build_docs_epub(self):
        """Build ePub docs"""
        if ('epub' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_epub')

    def build_docs_class(self, builder_class):
        """Build docs with additional doc backends

        These steps are not necessarily required for the build to halt, so we
        only raise a warning exception here. A hard error will halt the build
        process.
        """
        builder = get_builder_class(builder_class)(self.build_env,
                                                   python_env=self.python_env)
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure"""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])
class Symlink:

    """Base class for symlinking of projects."""

    def __init__(self, project):
        # NOTE: WEB_ROOT / CNAME_ROOT / PROJECT_CNAME_ROOT and the
        # get_subprojects/get_translations/get_version_queryset hooks are
        # expected to be supplied by subclasses — not visible here.
        self.project = project
        self.project_root = os.path.join(self.WEB_ROOT, project.slug)
        self.subproject_root = os.path.join(self.project_root, 'projects')
        self.environment = LocalEnvironment(project)
        self.sanity_check()

    def sanity_check(self):
        """
        Make sure the project_root is the proper structure before continuing.

        This will leave it in the proper state for the single_project setting.
        """
        root_is_link = os.path.islink(self.project_root)
        if root_is_link and not self.project.single_version:
            log.info(
                constants.LOG_TEMPLATE,
                {
                    'project': self.project.slug,
                    'version': '',
                    'msg': 'Removing single version symlink',
                })
            safe_unlink(self.project_root)
            safe_makedirs(self.project_root)
        elif (self.project.single_version and
                not root_is_link and
                os.path.exists(self.project_root)):
            # Stale real directory where a single-version symlink belongs.
            shutil.rmtree(self.project_root)
        elif not os.path.lexists(self.project_root):
            safe_makedirs(self.project_root)

        # CNAME root directories
        for cname_dir in (self.CNAME_ROOT, self.PROJECT_CNAME_ROOT):
            if not os.path.lexists(cname_dir):
                safe_makedirs(cname_dir)

    def run(self):
        """
        Create proper symlinks in the right order.

        Since we have a small nest of directories and symlinks, the ordering
        of these calls matter, so we provide this helper to make life easier.
        """
        # Outside of the web root
        self.symlink_cnames()

        # Build structure inside symlink zone
        if self.project.single_version:
            self.symlink_single_version()
            self.symlink_subprojects()
        else:
            self.symlink_translations()
            self.symlink_subprojects()
            self.symlink_versions()

    def symlink_cnames(self, domain=None):
        """
        Symlink project CNAME domains.

        Link from HOME/$CNAME_ROOT/<cname> -> HOME/$WEB_ROOT/<project>
        Also give cname -> project link
        Link from HOME/public_cname_project/<cname> -> HOME/<project>/
        """
        if domain:
            cnames = [domain]
        else:
            cnames = Domain.objects.filter(
                project=self.project).values_list('domain', flat=True)
        for cname in cnames:
            log.debug(constants.LOG_TEMPLATE, {
                'project': self.project.slug,
                'version': '',
                'msg': 'Symlinking CNAME: {} -> {}'.format(
                    cname, self.project.slug),
            })

            # CNAME to doc root
            self.environment.run(
                'ln', '-nsf', self.project_root,
                os.path.join(self.CNAME_ROOT, cname))

            # Project symlink
            self.environment.run(
                'ln', '-nsf', self.project.doc_path,
                os.path.join(self.PROJECT_CNAME_ROOT, cname))

    def remove_symlink_cname(self, domain):
        """
        Remove CNAME symlink.

        :param domain: domain for which symlink is to be removed
        :type domain: str
        """
        log.debug(constants.LOG_TEMPLATE, {
            'project': self.project.slug,
            'version': '',
            'msg': 'Removing symlink for CNAME {}'.format(domain),
        })
        safe_unlink(os.path.join(self.CNAME_ROOT, domain))

    def symlink_subprojects(self):
        """
        Symlink project subprojects.

        Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
        """
        seen_slugs = set()
        relationships = self.get_subprojects()
        if relationships.count():
            # Don't create the `projects/` directory unless subprojects exist.
            if not os.path.exists(self.subproject_root):
                safe_makedirs(self.subproject_root)
        for relationship in relationships:
            # A mapping of slugs for the subproject URL to the actual built
            # documentation
            from_to = OrderedDict(
                {relationship.child.slug: relationship.child.slug})
            seen_slugs.add(relationship.child.slug)
            if relationship.alias:
                from_to[relationship.alias] = relationship.child.slug
                seen_slugs.add(relationship.alias)
            for from_slug, to_slug in list(from_to.items()):
                log.debug(constants.LOG_TEMPLATE, {
                    'project': self.project.slug,
                    'version': '',
                    'msg': 'Symlinking subproject: {} -> {}'.format(
                        from_slug, to_slug),
                })
                symlink = os.path.join(self.subproject_root, from_slug)
                docs_dir = os.path.join(self.WEB_ROOT, to_slug)
                symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
                if not os.path.lexists(symlink_dir):
                    safe_makedirs(symlink_dir)

                # TODO this should use os.symlink, not a call to shell. For
                # now, this passes command as a list to be explicit about
                # escaping characters like spaces.
                result = self.environment.run('ln', '-nsf', docs_dir, symlink)
                if result.exit_code > 0:
                    log.error(
                        'Could not symlink path: status=%d error=%s',
                        result.exit_code,
                        result.error,
                    )

        # Remove old symlinks
        if os.path.exists(self.subproject_root):
            for stale in os.listdir(self.subproject_root):
                if stale not in seen_slugs:
                    safe_unlink(os.path.join(self.subproject_root, stale))

    def symlink_translations(self):
        """
        Symlink project translations.

        Link from $WEB_ROOT/<project>/<language>/ ->
                  $WEB_ROOT/<translation>/<language>/
        """
        translations = {
            trans.language: trans.slug
            for trans in self.get_translations()
        }

        # Make sure the language directory is a directory
        language_dir = os.path.join(self.project_root, self.project.language)
        if os.path.islink(language_dir):
            safe_unlink(language_dir)
        if not os.path.lexists(language_dir):
            safe_makedirs(language_dir)

        for language, slug in list(translations.items()):
            log.debug(constants.LOG_TEMPLATE, {
                'project': self.project.slug,
                'version': '',
                'msg': 'Symlinking translation: {}->{}'.format(language, slug),
            })
            self.environment.run(
                'ln', '-nsf',
                os.path.join(self.WEB_ROOT, slug, language),
                os.path.join(self.project_root, language),
            )

        # Remove old symlinks
        keep = ['projects', self.project.language]
        for lang in os.listdir(self.project_root):
            if lang not in translations and lang not in keep:
                stale = os.path.join(self.project_root, lang)
                if os.path.islink(stale):
                    safe_unlink(stale)
                else:
                    shutil.rmtree(stale)

    def symlink_single_version(self):
        """
        Symlink project single version.

        Link from:
        $WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
        """
        version = self.get_default_version()

        # Clean up symlinks
        symlink = self.project_root
        if os.path.islink(symlink):
            safe_unlink(symlink)
        elif os.path.exists(symlink):
            shutil.rmtree(symlink)

        # Create symlink
        if version is not None:
            self.environment.run(
                'ln', '-nsf',
                os.path.join(settings.DOCROOT, self.project.slug,
                             'rtd-builds', version.slug),
                symlink,
            )

    def symlink_versions(self):
        """
        Symlink project's versions.

        Link from $WEB_ROOT/<project>/<language>/<version>/ ->
                  HOME/user_builds/<project>/rtd-builds/<version>
        """
        linked_slugs = set()
        version_dir = os.path.join(self.WEB_ROOT, self.project.slug,
                                   self.project.language)
        # Include active public versions,
        # as well as public versions that are built but not active, for
        # archived versions
        version_queryset = self.get_version_queryset()
        if version_queryset.count():
            if not os.path.exists(version_dir):
                safe_makedirs(version_dir)
        for version in version_queryset:
            log.debug(constants.LOG_TEMPLATE, {
                'project': self.project.slug,
                'version': '',
                'msg': 'Symlinking Version: {}'.format(version),
            })
            self.environment.run(
                'ln', '-nsf',
                os.path.join(settings.DOCROOT, self.project.slug,
                             'rtd-builds', version.slug),
                os.path.join(version_dir, version.slug),
            )
            linked_slugs.add(version.slug)

        # Remove old symlinks
        if os.path.exists(version_dir):
            for old_ver in os.listdir(version_dir):
                if old_ver not in linked_slugs:
                    safe_unlink(os.path.join(version_dir, old_ver))

    def get_default_version(self):
        """Look up project default version, return None if not found."""
        slug = self.project.get_default_version()
        try:
            return self.get_version_queryset().get(slug=slug)
        except Version.DoesNotExist:
            return None
class Symlink:

    """Base class for symlinking of projects."""

    def __init__(self, project):
        # WEB_ROOT / CNAME_ROOT / PROJECT_CNAME_ROOT plus the query hooks
        # (get_subprojects, get_translations, get_version_queryset) come
        # from subclasses — not visible in this file.
        self.project = project
        self.project_root = os.path.join(self.WEB_ROOT, project.slug)
        self.subproject_root = os.path.join(self.project_root, 'projects')
        self.environment = LocalEnvironment(project)
        self.sanity_check()

    def _log_debug(self, msg):
        """Emit a project-tagged debug log line (version field left blank)."""
        log.debug(
            constants.LOG_TEMPLATE,
            {
                'project': self.project.slug,
                'version': '',
                'msg': msg,
            }
        )

    def sanity_check(self):
        """
        Make sure the project_root is the proper structure before continuing.

        This will leave it in the proper state for the single_project setting.
        """
        if os.path.islink(self.project_root) and not self.project.single_version:
            log.info(
                constants.LOG_TEMPLATE,
                {
                    'project': self.project.slug,
                    'version': '',
                    'msg': 'Removing single version symlink',
                }
            )
            safe_unlink(self.project_root)
            safe_makedirs(self.project_root)
            return
        if (self.project.single_version and
                not os.path.islink(self.project_root) and
                os.path.exists(self.project_root)):
            shutil.rmtree(self.project_root)
        elif not os.path.lexists(self.project_root):
            safe_makedirs(self.project_root)

    # CNAME root directories are created lazily at the end of every check.

    def sanity_check_cname_roots(self):
        """Create the CNAME root directories when they are missing."""
        if not os.path.lexists(self.CNAME_ROOT):
            safe_makedirs(self.CNAME_ROOT)
        if not os.path.lexists(self.PROJECT_CNAME_ROOT):
            safe_makedirs(self.PROJECT_CNAME_ROOT)

    def run(self):
        """
        Create proper symlinks in the right order.

        Since we have a small nest of directories and symlinks, the ordering
        of these calls matter, so we provide this helper to make life easier.
        """
        # Outside of the web root
        self.symlink_cnames()

        # Build structure inside symlink zone
        if self.project.single_version:
            self.symlink_single_version()
            self.symlink_subprojects()
        else:
            self.symlink_translations()
            self.symlink_subprojects()
            self.symlink_versions()

    def symlink_cnames(self, domain=None):
        """
        Symlink project CNAME domains.

        Link from HOME/$CNAME_ROOT/<cname> -> HOME/$WEB_ROOT/<project>
        Also give cname -> project link
        Link from HOME/public_cname_project/<cname> -> HOME/<project>/
        """
        if domain:
            domain_names = [domain]
        else:
            domain_names = Domain.objects.filter(
                project=self.project).values_list('domain', flat=True)

        for domain_name in domain_names:
            self._log_debug(
                'Symlinking CNAME: {} -> {}'.format(
                    domain_name, self.project.slug))

            # CNAME to doc root
            self.environment.run(
                'ln', '-nsf', self.project_root,
                os.path.join(self.CNAME_ROOT, domain_name))

            # Project symlink
            self.environment.run(
                'ln', '-nsf', self.project.doc_path,
                os.path.join(self.PROJECT_CNAME_ROOT, domain_name))

    def remove_symlink_cname(self, domain):
        """
        Remove CNAME symlink.

        :param domain: domain for which symlink is to be removed
        :type domain: str
        """
        self._log_debug('Removing symlink for CNAME {}'.format(domain))
        safe_unlink(os.path.join(self.CNAME_ROOT, domain))

    def symlink_subprojects(self):
        """
        Symlink project subprojects.

        Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
        """
        known = set()
        rels = self.get_subprojects()
        if rels.count():
            # Don't create the `projects/` directory unless subprojects exist.
            if not os.path.exists(self.subproject_root):
                safe_makedirs(self.subproject_root)
        for rel in rels:
            # A mapping of slugs for the subproject URL to the actual built
            # documentation
            url_to_slug = OrderedDict({rel.child.slug: rel.child.slug})
            known.add(rel.child.slug)
            if rel.alias:
                url_to_slug[rel.alias] = rel.child.slug
                known.add(rel.alias)
            for url_name, target_slug in list(url_to_slug.items()):
                self._log_debug(
                    'Symlinking subproject: {} -> {}'.format(
                        url_name, target_slug))
                link_path = os.path.join(self.subproject_root, url_name)
                target_path = os.path.join(self.WEB_ROOT, target_slug)
                parent_dir = os.sep.join(
                    link_path.split(os.path.sep)[:-1])
                if not os.path.lexists(parent_dir):
                    safe_makedirs(parent_dir)

                # TODO this should use os.symlink, not a call to shell. For
                # now, this passes command as a list to be explicit about
                # escaping characters like spaces.
                result = self.environment.run(
                    'ln', '-nsf', target_path, link_path)
                if result.exit_code > 0:
                    log.error(
                        'Could not symlink path: status=%d error=%s',
                        result.exit_code,
                        result.error,
                    )

        # Remove old symlinks
        if os.path.exists(self.subproject_root):
            for entry in os.listdir(self.subproject_root):
                if entry not in known:
                    safe_unlink(os.path.join(self.subproject_root, entry))

    def symlink_translations(self):
        """
        Symlink project translations.

        Link from $WEB_ROOT/<project>/<language>/ ->
                  $WEB_ROOT/<translation>/<language>/
        """
        translations = {}
        for trans in self.get_translations():
            translations[trans.language] = trans.slug

        # Make sure the language directory is a directory
        language_dir = os.path.join(self.project_root, self.project.language)
        if os.path.islink(language_dir):
            safe_unlink(language_dir)
        if not os.path.lexists(language_dir):
            safe_makedirs(language_dir)

        for language, slug in list(translations.items()):
            self._log_debug(
                'Symlinking translation: {}->{}'.format(language, slug))
            self.environment.run(
                'ln', '-nsf',
                os.path.join(self.WEB_ROOT, slug, language),
                os.path.join(self.project_root, language))

        # Remove old symlinks
        for lang in os.listdir(self.project_root):
            if (lang in translations or
                    lang in ['projects', self.project.language]):
                continue
            stale_path = os.path.join(self.project_root, lang)
            if os.path.islink(stale_path):
                safe_unlink(stale_path)
            else:
                shutil.rmtree(stale_path)

    def symlink_single_version(self):
        """
        Symlink project single version.

        Link from:
        $WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
        """
        version = self.get_default_version()

        # Clean up symlinks
        link_path = self.project_root
        if os.path.islink(link_path):
            safe_unlink(link_path)
        elif os.path.exists(link_path):
            shutil.rmtree(link_path)

        # Create symlink
        if version is not None:
            built_docs = os.path.join(
                settings.DOCROOT,
                self.project.slug,
                'rtd-builds',
                version.slug,
            )
            self.environment.run('ln', '-nsf', built_docs, link_path)

    def symlink_versions(self):
        """
        Symlink project's versions.

        Link from $WEB_ROOT/<project>/<language>/<version>/ ->
                  HOME/user_builds/<project>/rtd-builds/<version>
        """
        current = set()
        version_dir = os.path.join(
            self.WEB_ROOT,
            self.project.slug,
            self.project.language,
        )
        # Include active public versions,
        # as well as public versions that are built but not active, for
        # archived versions
        version_queryset = self.get_version_queryset()
        if version_queryset.count():
            if not os.path.exists(version_dir):
                safe_makedirs(version_dir)
        for version in version_queryset:
            self._log_debug('Symlinking Version: {}'.format(version))
            built_docs = os.path.join(
                settings.DOCROOT,
                self.project.slug,
                'rtd-builds',
                version.slug,
            )
            self.environment.run(
                'ln', '-nsf', built_docs,
                os.path.join(version_dir, version.slug))
            current.add(version.slug)

        # Remove old symlinks
        if os.path.exists(version_dir):
            for old_ver in os.listdir(version_dir):
                if old_ver not in current:
                    safe_unlink(os.path.join(version_dir, old_ver))

    def get_default_version(self):
        """Look up project default version, return None if not found."""
        default_version = self.project.get_default_version()
        try:
            return self.get_version_queryset().get(slug=default_version)
        except Version.DoesNotExist:
            return None
class UpdateDocsTask(Task):

    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk` Primary key of the project to update
    `record` Whether or not to keep a record of the update in the database.
        Useful for preventing changes visible to the end-user when running
        commands from the shell, for example.
    """

    max_retries = 5
    default_retry_delay = (7 * 60)
    name = __name__ + '.update_docs'

    def __init__(self, build_env=None, python_env=None, config=None,
                 force=False, search=True, localmedia=True,
                 build=None, project=None, version=None):
        """
        Seed task state; ``run()`` overwrites these from API data per build.

        ``build``/``version``/``project`` default to ``{}`` rather than
        ``None`` so attribute/key access before ``run()`` does not blow up.
        """
        self.build_env = build_env
        self.python_env = python_env
        self.build_force = force
        self.build_search = search
        self.build_localmedia = localmedia
        self.build = {}
        if build is not None:
            self.build = build
        self.version = {}
        if version is not None:
            self.version = version
        self.project = {}
        if project is not None:
            self.project = project
        # NOTE(review): unlike the others, self.config is only assigned when a
        # config is passed -- reading it before run() sets it would raise
        # AttributeError. Confirm this is intended.
        if config is not None:
            self.config = config

    def _log(self, msg):
        # Prefix log lines with the current project/version context.
        log.info(LOG_TEMPLATE
                 .format(project=self.project.slug,
                         version=self.version.slug,
                         msg=msg))

    # pylint: disable=arguments-differ
    def run(self, pk, version_pk=None, build_pk=None, record=True,
            docker=False, search=True, force=False, localmedia=True, **__):
        """
        Run a documentation build.

        This is fully wrapped in exception handling to account for a number of
        failure cases. We first run a few commands in a local build environment,
        but do not report on environment success. This avoids a flicker on the
        build output page where the build is marked as finished in between the
        local environment steps and the docker build steps.

        If a failure is raised, or the build is not successful, return
        ``False``, otherwise, ``True``.

        Unhandled exceptions raise a generic user facing error, which directs
        the user to bug us. It is therefore a benefit to have as few unhandled
        errors as possible.

        :param pk int: Project id
        :param version_pk int: Project Version id
        :param build_pk int: Build id
        :param record bool: record a build object in the database
        :param docker bool: use docker to build the project
        :param search bool: update search
        :param force bool: force Sphinx build
        :param localmedia: update localmedia
        :returns: if build was successful or not
        :rtype: bool
        """
        try:
            self.project = self.get_project(pk)
            self.version = self.get_version(self.project, version_pk)
            self.build = self.get_build(build_pk)
            self.build_search = search
            self.build_localmedia = localmedia
            self.build_force = force
            self.config = None

            setup_successful = self.run_setup(record=record)
            if not setup_successful:
                return False

        # Catch unhandled errors in the setup step
        except Exception as e:  # noqa
            log.exception(
                'An unhandled exception was raised during build setup',
                extra={'tags': {'build': build_pk}}
            )
            # NOTE(review): if get_project/get_version/get_build raised above,
            # run_setup() was never reached and self.setup_env is not set, so
            # this handler itself raises AttributeError -- confirm.
            self.setup_env.failure = BuildEnvironmentError(
                BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                    build_id=build_pk,
                )
            )
            self.setup_env.update_build(BUILD_STATE_FINISHED)
            return False
        else:
            # No exceptions in the setup step, catch unhandled errors in the
            # build steps
            try:
                self.run_build(record=record, docker=docker)
            except Exception as e:  # noqa
                log.exception(
                    'An unhandled exception was raised during project build',
                    extra={'tags': {'build': build_pk}}
                )
                self.build_env.failure = BuildEnvironmentError(
                    BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                        build_id=build_pk,
                    )
                )
                self.build_env.update_build(BUILD_STATE_FINISHED)
                return False

        return True

    def run_setup(self, record=True):
        """
        Run setup in the local environment.

        Checks out the code, syncs versions, and loads the YAML config.

        :param record bool: record the build in the database
        :returns: True if successful.
        """
        # update_on_success=False: the setup environment must not mark the
        # build finished on success -- the build step reports final state.
        self.setup_env = LocalEnvironment(
            project=self.project,
            version=self.version,
            build=self.build,
            record=record,
            update_on_success=False,
        )

        # Environment used for code checkout & initial configuration reading
        with self.setup_env:
            if self.project.skip:
                raise BuildEnvironmentError(
                    _('Builds for this project are temporarily disabled'))
            try:
                self.setup_vcs()
            except vcs_support_utils.LockTimeout as e:
                # Another build holds the repo lock; schedule a Celery retry
                # (throw=False so we fall through) and fail this attempt with
                # HTTP 423 (Locked).
                self.retry(exc=e, throw=False)
                raise BuildEnvironmentError(
                    'Version locked, retrying in 5 minutes.',
                    status_code=423
                )

            try:
                self.config = load_yaml_config(version=self.version)
            except ConfigError as e:
                raise BuildEnvironmentError(
                    'Problem parsing YAML configuration. {0}'.format(str(e))
                )

        if self.setup_env.failure or self.config is None:
            self._log('Failing build because of setup failure: %s' %
                      self.setup_env.failure)

            # Send notification to users only if the build didn't fail because of
            # LockTimeout: this exception occurs when a build is triggered before the previous
            # one has finished (e.g. two webhooks, one after the other)
            if not isinstance(self.setup_env.failure,
                              vcs_support_utils.LockTimeout):
                self.send_notifications()

            return False

        if self.setup_env.successful and not self.project.has_valid_clone:
            self.set_valid_clone()

        return True

    def run_build(self, docker=False, record=True):
        """
        Build the docs in an environment.

        If `docker` is True, or Docker is enabled by the settings.DOCKER_ENABLE
        setting, then build in a Docker environment. Otherwise build locally.

        :param docker bool: build inside a Docker container
        :param record bool: record the build in the database
        """
        env_vars = self.get_env_vars()

        if docker or settings.DOCKER_ENABLE:
            env_cls = DockerEnvironment
        else:
            env_cls = LocalEnvironment
        self.build_env = env_cls(project=self.project, version=self.version,
                                 build=self.build, record=record,
                                 environment=env_vars)

        # Environment used for building code, usually with Docker
        with self.build_env:

            if self.project.documentation_type == 'auto':
                self.update_documentation_type()

            python_env_cls = Virtualenv
            if self.config.use_conda:
                self._log('Using conda')
                python_env_cls = Conda
            self.python_env = python_env_cls(version=self.version,
                                             build_env=self.build_env,
                                             config=self.config)

            try:
                self.setup_environment()

                # TODO the build object should have an idea of these states, extend
                # the model to include an idea of these outcomes
                outcomes = self.build_docs()
                build_id = self.build.get('id')
            except SoftTimeLimitExceeded:
                # Celery soft time limit hit; surface a user-facing error
                # (handled by the build environment's context manager).
                raise BuildEnvironmentError(_('Build exited due to time out'))

            # Finalize build and update web servers
            if build_id:
                self.update_app_instances(
                    html=bool(outcomes['html']),
                    search=bool(outcomes['search']),
                    localmedia=bool(outcomes['localmedia']),
                    pdf=bool(outcomes['pdf']),
                    epub=bool(outcomes['epub']),
                )
            else:
                log.warning('No build ID, not syncing files')

        if self.build_env.failed:
            self.send_notifications()

        build_complete.send(sender=Build, build=self.build_env.build)

    @staticmethod
    def get_project(project_pk):
        """Get project from API."""
        project_data = api_v2.project(project_pk).get()
        return APIProject(**project_data)

    @staticmethod
    def get_version(project, version_pk):
        """Ensure we're using a sane version."""
        if version_pk:
            version_data = api_v2.version(version_pk).get()
        else:
            # No explicit version: fall back to the project's LATEST version.
            version_data = (api_v2
                            .version(project.slug)
                            .get(slug=LATEST)['objects'][0])
        return APIVersion(**version_data)

    @staticmethod
    def get_build(build_pk):
        """
        Retrieve build object from API.

        :param build_pk: Build primary key
        :returns: build data dict with API-resource keys stripped out
        """
        build = {}
        if build_pk:
            build = api_v2.build(build_pk).get()
        # Drop keys that are API resource metadata, not build state.
        return dict((key, val) for (key, val) in list(build.items())
                    if key not in ['project', 'version', 'resource_uri',
                                   'absolute_uri'])

    def setup_vcs(self):
        """
        Update the checkout of the repo to make sure it's the latest.

        This also syncs versions in the DB.

        :param build_env: Build environment
        """
        self.setup_env.update_build(state=BUILD_STATE_CLONING)

        self._log(msg='Updating docs from VCS')
        update_imported_docs(self.version.pk)
        # Record the checked-out commit on the build, when the VCS backend
        # reports one.
        commit = self.project.vcs_repo(self.version.slug).commit
        if commit:
            self.build['commit'] = commit

    def get_env_vars(self):
        """Get bash environment variables used for all builder commands."""
        env = {
            'READTHEDOCS': True,
            'READTHEDOCS_VERSION': self.version.slug,
            'READTHEDOCS_PROJECT': self.project.slug
        }

        # BIN_PATH points at the conda env or virtualenv bin dir so builder
        # commands pick up the right interpreter/tools.
        if self.config.use_conda:
            env.update({
                'CONDA_ENVS_PATH': os.path.join(self.project.doc_path, 'conda'),
                'CONDA_DEFAULT_ENV': self.version.slug,
                'BIN_PATH': os.path.join(self.project.doc_path, 'conda',
                                         self.version.slug, 'bin')
            })
        else:
            env.update({
                'BIN_PATH': os.path.join(self.project.doc_path, 'envs',
                                         self.version.slug, 'bin')
            })

        return env

    def set_valid_clone(self):
        """Mark on the project that it has been cloned properly."""
        # Read-modify-write through the API, then mirror the flag locally.
        project_data = api_v2.project(self.project.pk).get()
        project_data['has_valid_clone'] = True
        api_v2.project(self.project.pk).put(project_data)
        self.project.has_valid_clone = True

    def update_documentation_type(self):
        """
        Force Sphinx for 'auto' documentation type.

        This used to determine the type and automatically set the documentation
        type to Sphinx for rST and Mkdocs for markdown. It now just forces
        Sphinx, due to markdown support.
        """
        ret = 'sphinx'
        project_data = api_v2.project(self.project.pk).get()
        project_data['documentation_type'] = ret
        api_v2.project(self.project.pk).put(project_data)
        self.project.documentation_type = ret

    def update_app_instances(self, html=False, localmedia=False, search=False,
                             pdf=False, epub=False):
        """
        Update application instances with build artifacts.

        This triggers updates across application instances for html, pdf, epub,
        downloads, and search. Tasks are broadcast to all web servers from here.
        """
        # Update version if we have successfully built HTML output
        try:
            if html:
                version = api_v2.version(self.version.pk)
                version.patch({
                    'active': True,
                    'built': True,
                })
        except HttpClientError:
            # Best-effort: log and continue; the broadcast below still runs.
            log.exception('Updating version failed, skipping file sync: version=%s' % self.version)

        # Broadcast finalization steps to web application instances
        broadcast(
            type='app',
            task=sync_files,
            args=[
                self.project.pk,
                self.version.pk,
            ],
            kwargs=dict(
                hostname=socket.gethostname(),
                html=html,
                localmedia=localmedia,
                search=search,
                pdf=pdf,
                epub=epub,
            ),
            callback=sync_callback.s(version_pk=self.version.pk,
                                     commit=self.build['commit']),
        )

    def setup_environment(self):
        """
        Build the virtualenv and install the project into it.

        Always build projects with a virtualenv.

        :param build_env: Build environment to pass commands and execution
            through.
        """
        self.build_env.update_build(state=BUILD_STATE_INSTALLING)

        # Hold the repo lock while mutating the build dir so concurrent
        # builds of the same version do not clobber each other.
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            self.python_env.delete_existing_build_dir()
            self.python_env.setup_base()
            self.python_env.install_core_requirements()
            self.python_env.install_user_requirements()
            self.python_env.install_package()

    def build_docs(self):
        """
        Wrapper to all build functions.

        Executes the necessary builds for this task and returns whether the
        build was successful or not.

        :returns: Build outcomes with keys for html, search, localmedia, pdf,
                  and epub
        :rtype: dict
        """
        self.build_env.update_build(state=BUILD_STATE_BUILDING)
        before_build.send(sender=self.version)

        # defaultdict so unbuilt formats read as False without KeyError.
        outcomes = defaultdict(lambda: False)
        with self.project.repo_nonblockinglock(
                version=self.version,
                max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
            outcomes['html'] = self.build_docs_html()
            outcomes['search'] = self.build_docs_search()
            outcomes['localmedia'] = self.build_docs_localmedia()
            outcomes['pdf'] = self.build_docs_pdf()
            outcomes['epub'] = self.build_docs_epub()

        after_build.send(sender=self.version)

        return outcomes

    def build_docs_html(self):
        """Build HTML docs."""
        html_builder = get_builder_class(self.project.documentation_type)(
            build_env=self.build_env,
            python_env=self.python_env,
        )
        if self.build_force:
            html_builder.force()
        html_builder.append_conf()
        success = html_builder.build()
        if success:
            html_builder.move()

        # Gracefully attempt to move files via task on web workers.
        try:
            broadcast(type='app', task=move_files,
                      args=[self.version.pk, socket.gethostname()],
                      kwargs=dict(html=True)
                      )
        except socket.error:
            log.exception('move_files task has failed on socket error.')

        return success

    def build_docs_search(self):
        """Build search data with separate build."""
        if self.build_search:
            if self.project.is_type_mkdocs:
                return self.build_docs_class('mkdocs_json')
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_search')
        return False

    def build_docs_localmedia(self):
        """Get local media files with separate build."""
        if 'htmlzip' not in self.config.formats:
            return False

        if self.build_localmedia:
            if self.project.is_type_sphinx:
                return self.build_docs_class('sphinx_singlehtmllocalmedia')
        return False

    def build_docs_pdf(self):
        """Build PDF docs."""
        # PDF output is Sphinx-only and can be disabled per-project.
        if ('pdf' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_pdf')

    def build_docs_epub(self):
        """Build ePub docs."""
        # Same gating as PDF: Sphinx-only, skippable per-project.
        if ('epub' not in self.config.formats or
            self.project.slug in HTML_ONLY or
                not self.project.is_type_sphinx):
            return False
        return self.build_docs_class('sphinx_epub')

    def build_docs_class(self, builder_class):
        """
        Build docs with additional doc backends.

        These steps are not necessarily required for the build to halt, so we
        only raise a warning exception here. A hard error will halt the build
        process.

        :param builder_class: registered builder name passed to
            ``get_builder_class``
        :returns: whether the builder reported success
        """
        builder = get_builder_class(builder_class)(self.build_env,
                                                   python_env=self.python_env)
        success = builder.build()
        builder.move()
        return success

    def send_notifications(self):
        """Send notifications on build failure."""
        send_notifications.delay(self.version.pk, build_pk=self.build['id'])