def fileify(version_pk, commit):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.

    :param version_pk: primary key of the Version to import files for
    :param commit: VCS commit hash this build was produced from
    """
    version = Version.objects.get(pk=version_pk)
    project = version.project

    # Only CDN-enabled projects need ImportedFile records.
    if not project.cdn_enabled:
        return

    if not commit:
        log.info(
            LOG_TEMPLATE.format(
                project=project.slug,
                version=version.slug,
                msg=("Imported File not being built because no commit "
                     "information"),
            )
        )
        # Bail out, as the log message states: without commit information,
        # _manage_imported_files would record files with an empty commit and
        # purge files recorded under real commits.
        return

    path = project.rtd_build_path(version.slug)
    if path:
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug,
                                     msg="Creating ImportedFiles"))
        _manage_imported_files(version, path, commit)
    else:
        log.info(LOG_TEMPLATE.format(project=project.slug,
                                     version=version.slug,
                                     msg="No ImportedFile files"))
def symlink_cnames(version):
    """
    Symlink CNAMEs for a version's project, reading CNAMEs from Redis.

    OLD
    Link from HOME/user_builds/cnames/<cname> ->
              HOME/user_builds/<project>/rtd-builds/
    NEW
    Link from HOME/user_builds/cnametoproject/<cname> ->
              HOME/user_builds/<project>/
    """
    try:
        redis_conn = redis.Redis(**settings.REDIS)
        cnames = redis_conn.smembers('rtd_slug:v1:%s' % version.project.slug)
    except redis.ConnectionError:
        # Best-effort: a Redis outage just skips symlinking, it never fails
        # the build.
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg='Failed to symlink cnames, Redis error.'),
                  exc_info=True)
        return
    for cname in cnames:
        log.debug(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg="Symlinking CNAME: %s" % cname))
        docs_dir = version.project.rtd_build_path(version.slug)
        # Chop off the version from the end.
        docs_dir = '/'.join(docs_dir.split('/')[:-1])
        # Old symlink location -- Keep this here til we change nginx over
        symlink = version.project.cnames_symlink_path(cname)
        # Ensure the parent directory exists before creating the symlink.
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))

        # New symlink location
        new_docs_dir = version.project.doc_path
        new_cname_symlink = os.path.join(getattr(settings, 'SITE_ROOT'),
                                         'cnametoproject', cname)
        run_on_app_servers('mkdir -p %s' % '/'.join(new_cname_symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (new_docs_dir, new_cname_symlink))
def setup_vcs(self):
    """
    Update the checkout of the repo to make sure it's the latest.

    This also syncs versions in the DB.

    :raises BuildEnvironmentError: with a 404 status when the project
        cannot be imported from its VCS repository
    """
    # Mark the build as being in the cloning state before touching the VCS.
    self.setup_env.update_build(state=BUILD_STATE_CLONING)

    self._log(msg='Updating docs from VCS')
    try:
        update_imported_docs(self.version.pk)
        # Record which revision was checked out so the build can be tied
        # back to a specific commit.
        commit = self.project.vcs_repo(self.version.slug).commit
        if commit:
            self.build['commit'] = commit
    except ProjectImportError as e:
        log.error(
            LOG_TEMPLATE.format(project=self.project.slug,
                                version=self.version.slug,
                                msg=str(e)),
            exc_info=True,
        )
        # Translate the import failure into a user-visible build error.
        raise BuildEnvironmentError('Failed to import project',
                                    status_code=404)
def record_pdf(api, record, results, state, version):
    """Post the PDF build outcome to the (v1) API.

    No-op unless recording is enabled and the project is a Sphinx project.
    Never raises: failures to post are logged and swallowed so they cannot
    break the surrounding build.
    """
    if not record or 'sphinx' not in version.project.documentation_type:
        return None
    try:
        if 'pdf' in results:
            # results['pdf'] is (exit_code, stdout, stderr).
            pdf_exit = results['pdf'][0]
            pdf_success = pdf_exit == 0
            pdf_output = results['pdf'][1]
            pdf_error = results['pdf'][2]
        else:
            # No PDF step ran at all: fabricate a failing record.
            pdf_exit = 999
            pdf_success = False
            pdf_output = pdf_error = "PDF Failed"

        # NOTE(review): .decode() implies the command output is bytes
        # (Python 2 era code) — confirm before porting.
        pdf_output = pdf_output.decode('utf-8', 'ignore')
        pdf_error = pdf_error.decode('utf-8', 'ignore')

        api.build.post(dict(
            state=state,
            project='/api/v1/project/%s/' % version.project.pk,
            version='/api/v1/version/%s/' % version.pk,
            success=pdf_success,
            type='pdf',
            output=pdf_output,
            error=pdf_error,
            exit_code=pdf_exit,
        ))
    except Exception:
        # Deliberately broad: posting results is best-effort.
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg="Unable to post a new build"),
                  exc_info=True)
def _log_warning(self, msg):
    """Emit *msg* as a warning, tagged with this project and version."""
    formatted = LOG_TEMPLATE.format(
        project=self.project.slug,
        version=self.version.slug,
        msg=msg,
    )
    log.warning(formatted)
def get_client(self):
    """Create Docker client connection."""
    try:
        if self.client is None:
            # Lazily create and cache one API client per environment.
            self.client = APIClient(
                base_url=self.docker_socket,
                version=DOCKER_VERSION,
            )
        return self.client
    except DockerException as e:
        log.exception(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg='Could not connect to Docker API',
            ),
        )
        # We don't raise an error here mentioning Docker, that is a
        # technical detail that the user can't resolve on their own.
        # Instead, give the user a generic failure
        raise BuildEnvironmentError(
            BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                build_id=self.build['id'],
            )
        )
def symlink_subprojects(version): """ Link from HOME/user_builds/project/subprojects/<project> -> HOME/user_builds/<project>/rtd-builds/ """ # Subprojects if getattr(settings, 'DONT_HIT_DB', True): subproject_slugs = [data['slug'] for data in api.project(version.project.pk).subprojects.get()['subprojects']] else: rels = version.project.subprojects.all() subproject_slugs = [rel.child.slug for rel in rels] for slug in subproject_slugs: slugs = [slug] if '_' in slugs[0]: slugs.append(slugs[0].replace('_', '-')) for subproject_slug in slugs: log.debug(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Symlinking subproject: %s" % subproject_slug)) # The directory for this specific subproject symlink = version.project.subprojects_symlink_path(subproject_slug) run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1])) # Where the actual docs live docs_dir = os.path.join(settings.DOCROOT, subproject_slug, 'rtd-builds') run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def setup_base(self):
    """Create a fresh conda environment for this version.

    Any previously-created environment directory is removed first so that
    every build starts from a clean state.
    """
    conda_env_path = os.path.join(self.project.doc_path, 'conda')
    version_path = os.path.join(conda_env_path, self.version.slug)

    if os.path.exists(version_path):
        # Re-create conda directory each time to keep fresh state
        log.info(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg='Removing existing conda directory',
            ),
        )
        shutil.rmtree(version_path)
    self.build_env.run(
        'conda',
        'env',
        'create',
        '--quiet',
        '--name',
        self.version.slug,
        '--file',
        self.config.conda.environment,
        bin_path=None,  # Don't use conda bin that doesn't exist yet
        cwd=self.checkout_path,
    )
def _log_warning(self, msg):
    """Log *msg* at WARNING level against the project's 'latest' version."""
    message = LOG_TEMPLATE.format(
        project=self.project.slug,
        version='latest',
        msg=msg,
    )
    log.warning(message)
def email_notification(version, build, email):
    """Send email notifications for build failure

    :param version: :py:class:`Version` instance that failed
    :param build: :py:class:`Build` instance that failed
    :param email: Email recipient address
    """
    log.debug(
        LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="sending email to: %s" % email)
    )
    context = {
        "version": version,
        "project": version.project,
        "build": build,
        "build_url": "https://{0}{1}".format(
            getattr(settings, "PRODUCTION_DOMAIN", "readthedocs.org"), build.get_absolute_url()
        ),
        "unsub_url": "https://{0}{1}".format(
            getattr(settings, "PRODUCTION_DOMAIN", "readthedocs.org"),
            reverse("projects_notifications", args=[version.project.slug]),
        ),
    }
    if build.commit:
        # Abbreviate the commit hash for a readable subject line.
        title = _("Failed: {project.name} ({commit})").format(commit=build.commit[:8], **context)
    else:
        title = _("Failed: {project.name} ({version.verbose_name})").format(**context)
    send_email(
        email,
        title,
        template="projects/email/build_failed.txt",
        template_html="projects/email/build_failed.html",
        context=context,
    )
def symlink_cnames(project):
    """Symlink project CNAME domains

    OLD
    Link from HOME/user_builds/cnames/<cname> ->
              HOME/user_builds/<project>/rtd-builds/
    NEW
    Link from HOME/user_builds/cnametoproject/<cname> ->
              HOME/user_builds/<project>/
    """
    # CNAMEs come from Domain records rather than Redis in this variant.
    domains = Domain.objects.filter(project=project, cname=True)
    for domain in domains:
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version=project.get_default_version(),
            msg="Symlinking CNAME: %s" % domain.clean_host)
        )
        docs_dir = project.rtd_build_path()
        # Chop off the version from the end.
        docs_dir = '/'.join(docs_dir.split('/')[:-1])
        # Old symlink location -- Keep this here til we change nginx over
        symlink = project.cnames_symlink_path(domain.clean_host)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))

        # New symlink location
        new_docs_dir = project.doc_path
        new_cname_symlink = os.path.join(
            getattr(settings, 'SITE_ROOT'),
            'cnametoproject',
            domain.clean_host,
        )
        run_on_app_servers('mkdir -p %s' % '/'.join(new_cname_symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (new_docs_dir, new_cname_symlink))
def email_notification(version, build, email):
    """Send email notifications for build failure

    :param version: :py:class:`Version` instance that failed
    :param build: :py:class:`Build` instance that failed
    :param email: Email recipient address
    """
    log.debug(LOG_TEMPLATE.format(project=version.project.slug,
                                  version=version.slug,
                                  msg='sending email to: %s' % email))
    context = {'version': version,
               'project': version.project,
               'build': build,
               'build_url': 'https://{0}{1}'.format(
                   getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
                   build.get_absolute_url()),
               'unsub_url': 'https://{0}{1}'.format(
                   getattr(settings, 'PRODUCTION_DOMAIN', 'readthedocs.org'),
                   reverse('projects_notifications',
                           args=[version.project.slug])),
               }
    if build.commit:
        # Abbreviate the commit hash for a readable subject line.
        title = _('Failed: {project.name} ({commit})').format(
            commit=build.commit[:8], **context)
    else:
        title = _('Failed: {project.name} ({version.verbose_name})').format(
            **context)
    send_email(
        email,
        title,
        template='projects/email/build_failed.txt',
        template_html='projects/email/build_failed.html',
        context=context
    )
def __exit__(self, exc_type, exc_value, tb):
    """End the build context: handle any exception, then mark finished."""
    suppress = self.handle_exception(exc_type, exc_value, tb)
    self.update_build(BUILD_STATE_FINISHED)
    message = LOG_TEMPLATE.format(
        project=self.project.slug,
        version=self.version.slug,
        msg='Build finished',
    )
    log.info(message)
    return suppress
def remove_symlink_single_version(version):
    """Remove single_version symlink"""
    project = version.project
    log.debug(LOG_TEMPLATE.format(
        project=project.slug,
        version=project.get_default_version(),
        msg="Removing symlink for single_version",
    ))
    # Delete the symlink on every application server.
    run_on_app_servers('rm -f %s' % project.single_version_symlink_path())
def _log_warning(self, msg):
    """Write a project/version-scoped warning to the module logger."""
    log.warning(
        LOG_TEMPLATE.format(msg=msg,
                            version=self.version.slug,
                            project=self.project.slug))
def remove_symlink_single_version(version):
    """Delete the project's single_version symlink on the app servers."""
    default_version = version.project.get_default_version()
    log.debug(LOG_TEMPLATE.format(project=version.project.slug,
                                  version=default_version,
                                  msg="Removing symlink for single_version"))
    target = version.project.single_version_symlink_path()
    run_on_app_servers('rm -f %s' % target)
def setup_vcs(version, build):
    """
    Update the checkout of the repo to make sure it's the latest.

    This also syncs versions in the DB.

    :param version: Version instance to check out
    :param build: mutable build dict; the checked-out commit is recorded
        under the 'commit' key
    :returns: whatever ``update_imported_docs`` returned
    :raises ProjectImportError: re-raised after logging when the import fails
    """
    log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                 version=version.slug,
                                 msg='Updating docs from VCS'))
    try:
        update_output = update_imported_docs(version.pk)
        # Record which revision was checked out for this build.
        commit = version.project.vcs_repo(version.slug).commit
        if commit:
            build['commit'] = commit
    except ProjectImportError:
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg='Failed to import project; skipping build'),
                  exc_info=True)
        raise
    return update_output
def update_static_metadata(project_pk, path=None):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.

    :param project_pk: primary key of the Project to write metadata for
    :param path: optional output path; defaults to the project's
        ``static_metadata_path()``
    """
    project = Project.objects.get(pk=project_pk)
    if not path:
        path = project.static_metadata_path()

    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Updating static metadata',
        ))
    translations = [trans.language for trans in project.translations.all()]
    languages = set(translations)
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        # Context manager guarantees the handle is closed even if json.dump
        # raises (the original open()/close() pair leaked it on error).
        with open(path, 'w+') as fh:
            json.dump(metadata, fh)
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        # Best-effort: failure to write metadata is logged, not raised.
        log.debug(
            LOG_TEMPLATE.format(
                project=project.slug,
                version='',
                msg='Cannot write to metadata.json: {0}'.format(e)))
def create_container(self):
    """Create docker container."""
    client = self.get_client()
    try:
        log.info(
            'Creating Docker container: image=%s',
            self.container_image,
        )
        self.container = client.create_container(
            image=self.container_image,
            # The container just sleeps for the allowed build time and then
            # exits with the sentinel timeout code, so hung builds are
            # detectable from the exit status.
            command=('/bin/sh -c "sleep {time}; exit {exit}"'.format(
                time=self.container_time_limit,
                exit=DOCKER_TIMEOUT_EXIT_CODE,
            )),
            name=self.container_id,
            hostname=self.container_id,
            host_config=self.get_container_host_config(),
            detach=True,
            environment=self.environment,
        )
        client.start(container=self.container_id)
    except ConnectionError:
        log.exception(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg=('Could not connect to the Docker API, '
                     'make sure Docker is running'),
            ),
        )
        # We don't raise an error here mentioning Docker, that is a
        # technical detail that the user can't resolve on their own.
        # Instead, give the user a generic failure
        raise BuildEnvironmentError(
            BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                build_id=self.build['id'],
            ),
        )
    except DockerAPIError as e:
        # Docker accepted the connection but rejected the request; surface
        # Docker's own explanation in the logs.
        log.exception(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg=e.explanation,
            ),
        )
        raise BuildEnvironmentCreationFailed
def delete_existing_venv_dir(self):
    """Blow away any previously-created virtualenv directory."""
    venv_dir = self.venv_path()
    if not os.path.exists(venv_dir):
        return
    log.info(LOG_TEMPLATE.format(
        project=self.project.slug,
        version=self.version.slug,
        msg='Removing existing venv directory',
    ))
    shutil.rmtree(venv_dir)
def update_static_metadata(project_pk, path=None):
    """Update static metadata JSON file

    Metadata settings include the following project settings:

    version
        The default version for the project, default: `latest`

    language
        The default language for the project, default: `en`

    languages
        List of languages built by linked translation projects.

    :param project_pk: primary key of the Project to write metadata for
    :param path: optional output path; defaults to the project's
        ``static_metadata_path()``
    """
    project = Project.objects.get(pk=project_pk)
    if not path:
        path = project.static_metadata_path()

    log.info(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='Updating static metadata',
    ))
    translations = [trans.language for trans in project.translations.all()]
    languages = set(translations)
    # Convert to JSON safe types
    metadata = {
        'version': project.default_version,
        'language': project.language,
        'languages': list(languages),
        'single_version': project.single_version,
    }
    try:
        # Context manager guarantees the handle is closed even if json.dump
        # raises (the original open()/close() pair leaked it on error).
        with open(path, 'w+') as fh:
            json.dump(metadata, fh)
        Syncer.copy(path, path, host=socket.gethostname(), file=True)
    except (AttributeError, IOError) as e:
        # Best-effort: failure to write metadata is logged, not raised.
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='Cannot write to metadata.json: {0}'.format(e)
        ))
def __exit__(self, exc_type, exc_value, tb):
    """End of environment context.

    Tears down the Docker container (kill, then remove), then reports the
    build outcome. Docker teardown failures are logged but never override a
    real build error that is already in flight.
    """
    try:
        # Update buildenv state given any container error states first
        self.update_build_from_container_state()

        client = self.get_client()
        try:
            client.kill(self.container_id)
        except DockerAPIError:
            # Container may have already exited; removal below still runs.
            log.exception(
                'Unable to kill container: id=%s',
                self.container_id,
            )
        try:
            log.info('Removing container: id=%s', self.container_id)
            client.remove_container(self.container_id)
        # Catch direct failures from Docker API or with a requests HTTP
        # request. These errors should not surface to the user.
        except (DockerAPIError, ConnectionError):
            log.exception(
                LOG_TEMPLATE.format(
                    project=self.project.slug,
                    version=self.version.slug,
                    msg="Couldn't remove container",
                ),
            )
        self.container = None
    except BuildEnvironmentError:
        # Several interactions with Docker can result in a top level failure
        # here. We'll catch this and report if there were no reported errors
        # already. These errors are not as important as a failure at deeper
        # code
        if not all([exc_type, exc_value, tb]):
            exc_type, exc_value, tb = sys.exc_info()

    ret = self.handle_exception(exc_type, exc_value, tb)
    self.update_build(BUILD_STATE_FINISHED)
    log.info(
        LOG_TEMPLATE.format(project=self.project.slug,
                            version=self.version.slug,
                            msg='Build finished'))
    return ret
def delete_existing_build_dir(self):
    """Remove the leftover 'build' directory inside the virtualenv."""
    stale = os.path.join(self.venv_path(), 'build')
    if not os.path.exists(stale):
        return
    log.info(LOG_TEMPLATE.format(
        project=self.project.slug,
        version=self.version.slug,
        msg='Removing existing build directory',
    ))
    shutil.rmtree(stale)
def delete_existing_venv_dir(self):
    """Remove any stale virtualenv directory before building."""
    path = self.venv_path()
    if os.path.exists(path):
        message = LOG_TEMPLATE.format(
            project=self.project.slug,
            version=self.version.slug,
            msg='Removing existing venv directory',
        )
        log.info(message)
        shutil.rmtree(path)
def __enter__(self):
    """Start of environment context.

    Removes any stale (stopped) container with our name, creates the
    checkout path, then creates and starts the build container.
    """
    try:
        # Test for existing container. We remove any stale containers that
        # are no longer running here if there is a collision. If the
        # container is still running, this would be a failure of the version
        # locking code, so we throw an exception.
        state = self.container_state()
        if state is not None:
            if state.get('Running') is True:
                exc = BuildEnvironmentError(
                    _(
                        'A build environment is currently '
                        'running for this version',
                    ),
                )
                self.failure = exc
                self.build['state'] = BUILD_STATE_FINISHED
                raise exc
            else:
                log.warning(
                    LOG_TEMPLATE.format(
                        project=self.project.slug,
                        version=self.version.slug,
                        msg=(
                            'Removing stale container {0}'
                            .format(self.container_id)
                        ),
                    )
                )
                client = self.get_client()
                client.remove_container(self.container_id)
    except (DockerAPIError, ConnectionError):
        # If there is an exception here, we swallow the exception as this
        # was just during a sanity check anyways.
        pass
    except BuildEnvironmentError:
        # There may have been a problem connecting to Docker altogether, or
        # some other handled exception here.
        self.__exit__(*sys.exc_info())
        raise

    # Create the checkout path if it doesn't exist to avoid Docker creation
    if not os.path.exists(self.project.doc_path):
        os.makedirs(self.project.doc_path)

    try:
        self.create_container()
    except:  # noqa
        # Ensure teardown/reporting runs before re-raising.
        self.__exit__(*sys.exc_info())
        raise
    return self
def symlink_cnames(version):
    """Symlink project CNAME domains

    OLD
    Link from HOME/user_builds/cnames/<cname> ->
              HOME/user_builds/<project>/rtd-builds/
    NEW
    Link from HOME/user_builds/cnametoproject/<cname> ->
              HOME/user_builds/<project>/
    """
    try:
        redis_conn = redis.Redis(**settings.REDIS)
        cnames = redis_conn.smembers('rtd_slug:v1:%s' % version.project.slug)
    except redis.ConnectionError:
        # Best-effort: a Redis outage skips symlinking without failing the
        # build.
        log.error(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg='Failed to symlink cnames, Redis error.'),
            exc_info=True)
        return
    for cname in cnames:
        log.debug(
            LOG_TEMPLATE.format(project=version.project.slug,
                                version=version.slug,
                                msg="Symlinking CNAME: %s" % cname))
        docs_dir = version.project.rtd_build_path(version.slug)
        # Chop off the version from the end.
        docs_dir = '/'.join(docs_dir.split('/')[:-1])
        # Old symlink location -- Keep this here til we change nginx over
        symlink = version.project.cnames_symlink_path(cname)
        run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))

        # New symlink location
        new_docs_dir = version.project.doc_path
        new_cname_symlink = os.path.join(getattr(settings, 'SITE_ROOT'),
                                         'cnametoproject', cname)
        run_on_app_servers('mkdir -p %s' % '/'.join(new_cname_symlink.split('/')[:-1]))
        run_on_app_servers('ln -nsf %s %s' % (new_docs_dir, new_cname_symlink))
def get_client(self):
    '''Create Docker client connection

    Lazily creates and caches one client per environment instance.
    '''
    try:
        if self.client is None:
            self.client = Client(base_url=self.docker_socket,
                                 version=DOCKER_VERSION)
        return self.client
    except DockerException as e:
        log.error(LOG_TEMPLATE.format(project=self.project.slug,
                                      version=self.version.slug,
                                      msg=e),
                  exc_info=True)
        # Hide the Docker-level detail from the user behind a generic error.
        raise BuildEnvironmentError('Problem creating build environment')
def webhook_notification(version, build, hook_url):
    """Send webhook notification for project webhook

    :param version: Version instance to send hook for
    :param build: Build instance that failed
    :param hook_url: Hook URL to send to
    """
    project = version.project

    data = json.dumps({
        'name': project.name,
        'slug': project.slug,
        'build': {
            'id': build.id,
            'success': build.success,
            'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
        },
        # Human-readable summary, e.g. for chat integrations.
        'text': "Docs build {} for {} completed at {}. Check out {}".format(
            build.id, project.name, build.date.strftime('%Y-%m-%d %H:%M:%S'),
            version.get_absolute_url())
    })
    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='sending notification to: %s with payload %s' % (hook_url, data)))
    # NOTE(review): no timeout is passed; a slow endpoint can stall the
    # worker — confirm whether that is acceptable here.
    resp = requests.post(hook_url, data=data, headers={"content-type": "application/json"})
    log.info(
        LOG_TEMPLATE.format(
            project=project.slug,
            version='',
            msg='notification response: status %s with text %s' % (resp.status_code, resp.text)))
def webhook_notification(version, build, hook_url):
    """POST a JSON summary of *build* to the project's webhook URL."""
    project = version.project
    payload = {
        'name': project.name,
        'slug': project.slug,
        'build': {
            'id': build.id,
            'success': build.success,
            'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
        }
    }
    data = json.dumps(payload)
    log.debug(LOG_TEMPLATE.format(
        project=project.slug,
        version='',
        msg='sending notification to: %s' % hook_url,
    ))
    requests.post(hook_url, data=data)
def create_container(self):
    """Create docker container."""
    client = self.get_client()
    try:
        log.info(
            'Creating Docker container: image=%s',
            self.container_image,
        )
        self.container = client.create_container(
            image=self.container_image,
            # The container sleeps for the build time limit and exits with
            # the sentinel timeout code, so hung builds are detectable.
            command=('/bin/sh -c "sleep {time}; exit {exit}"'
                     .format(time=self.container_time_limit,
                             exit=DOCKER_TIMEOUT_EXIT_CODE)),
            name=self.container_id,
            hostname=self.container_id,
            host_config=self.get_container_host_config(),
            detach=True,
            environment=self.environment,
        )
        client.start(container=self.container_id)
    except ConnectionError as e:  # NOTE: `e` unused; log.exception records it
        log.exception(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg=(
                    'Could not connect to the Docker API, '
                    'make sure Docker is running'
                ),
            ),
        )
        # We don't raise an error here mentioning Docker, that is a
        # technical detail that the user can't resolve on their own.
        # Instead, give the user a generic failure
        raise BuildEnvironmentError(
            BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                build_id=self.build['id'],
            )
        )
    except DockerAPIError as e:
        # Docker accepted the connection but rejected the request; surface
        # Docker's own explanation in the logs.
        log.exception(
            LOG_TEMPLATE
            .format(
                project=self.project.slug,
                version=self.version.slug,
                msg=e.explanation,
            ),
        )
        raise BuildEnvironmentCreationFailed
def symlink_single_version(version):
    """
    Link from HOME/user_builds/<project>/single_version ->
              HOME/user_builds/<project>/rtd-builds/<default_version>/
    """
    project = version.project
    default_version = project.get_default_version()
    log.debug(LOG_TEMPLATE.format(project=project.slug,
                                  version=default_version,
                                  msg="Symlinking single_version"))

    # The single_version directory; make sure its parent exists first.
    symlink = project.single_version_symlink_path()
    run_on_app_servers('mkdir -p %s' % '/'.join(symlink.split('/')[:-1]))

    # Point the symlink at the default version's built docs.
    docs_dir = os.path.join(settings.DOCROOT, project.slug, 'rtd-builds',
                            default_version)
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def fileify(version_pk, commit):
    """
    Create ImportedFile objects for all of a version's files.

    This is a prereq for indexing the docs for search.
    It also causes celery-haystack to kick off an index of the file.

    :param version_pk: primary key of the Version to import files for
    :param commit: VCS commit hash this build was produced from
    """
    version = Version.objects.get(pk=version_pk)
    project = version.project

    # Only CDN-enabled projects need ImportedFile records.
    if not project.cdn_enabled:
        return

    if not commit:
        log.info(LOG_TEMPLATE.format(
            project=project.slug,
            version=version.slug,
            msg='Imported File not being built because no commit information'))
        # Bail out, as the log message states: without commit information,
        # _manage_imported_files would record files with an empty commit and
        # purge files recorded under real commits.
        return

    path = project.rtd_build_path(version.slug)
    if path:
        log.info(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg='Creating ImportedFiles'))
        _manage_imported_files(version, path, commit)
    else:
        log.info(LOG_TEMPLATE.format(project=project.slug,
                                     version=version.slug,
                                     msg='No ImportedFile files'))
def delete_existing_build_dir(self):
    """Delete the virtualenv's 'build' subdirectory if present."""
    candidate = os.path.join(self.venv_path(), 'build')
    if os.path.exists(candidate):
        log.info(
            LOG_TEMPLATE.format(
                msg='Removing existing build directory',
                version=self.version.slug,
                project=self.project.slug,
            ),
        )
        shutil.rmtree(candidate)
def webhook_notification(version, build, hook_url):
    """Send webhook notification for project webhook

    :param version: Version instance to send hook for
    :param build: Build instance that failed
    :param hook_url: Hook URL to send to
    """
    project = version.project
    build_payload = {
        "id": build.id,
        "success": build.success,
        "date": build.date.strftime("%Y-%m-%d %H:%M:%S"),
    }
    data = json.dumps({
        "name": project.name,
        "slug": project.slug,
        "build": build_payload,
    })
    log.debug(LOG_TEMPLATE.format(
        project=project.slug,
        version="",
        msg="sending notification to: %s" % hook_url,
    ))
    requests.post(hook_url, data=data)
def handle_exception(self, exc_type, exc_value, _):
    """Exception handling for __enter__ and __exit__

    This reports on the exception we're handling and special cases
    subclasses of BuildEnvironmentException. For
    :py:class:`BuildEnvironmentWarning`, exit this context gracefully, but
    don't mark the build as a failure. For all other exception classes,
    including :py:class:`BuildEnvironmentError`, the build will be marked as
    a failure and the context will be gracefully exited.
    """
    if exc_type is not None:
        log.error(LOG_TEMPLATE.format(project=self.project.slug,
                                      version=self.version.slug,
                                      msg=exc_value),
                  exc_info=True)
        # Warnings don't mark the build as failed; everything else does.
        if not issubclass(exc_type, BuildEnvironmentWarning):
            self.failure = exc_value
    # Always return True so the context-manager protocol suppresses the
    # exception.
    return True
def create_container(self):
    '''Create docker container'''
    client = self.get_client()
    image = self.container_image
    # A project may pin its own build image, overriding the default.
    if self.project.container_image:
        image = self.project.container_image
    try:
        self.container = client.create_container(
            image=image,
            # The container sleeps for the build time limit and exits with
            # the sentinel timeout code, so hung builds are detectable.
            command=('/bin/sh -c "sleep {time}; exit {exit}"'.format(
                time=self.container_time_limit,
                exit=DOCKER_TIMEOUT_EXIT_CODE)),
            name=self.container_id,
            hostname=self.container_id,
            # Template dirs are mounted read-only; only the project's doc
            # path is writable from inside the container.
            host_config=create_host_config(
                binds={
                    SPHINX_TEMPLATE_DIR: {
                        'bind': SPHINX_TEMPLATE_DIR,
                        'mode': 'ro'
                    },
                    MKDOCS_TEMPLATE_DIR: {
                        'bind': MKDOCS_TEMPLATE_DIR,
                        'mode': 'ro'
                    },
                    self.project.doc_path: {
                        'bind': self.project.doc_path,
                        'mode': 'rw'
                    },
                }),
            detach=True,
            environment={
                'READTHEDOCS_VERSION': self.version.slug,
                'READTHEDOCS_PROJECT': self.project.slug
            },
            mem_limit=self.container_mem_limit,
        )
        client.start(container=self.container_id)
    except DockerAPIError as e:
        log.error(LOG_TEMPLATE.format(project=self.project.slug,
                                      version=self.version.slug,
                                      msg=e.explanation),
                  exc_info=True)
        # Hide the Docker-level detail from the user behind a generic error.
        raise BuildEnvironmentError('Build environment creation failed')
def symlink_single_version(version):
    """
    Link from HOME/user_builds/<project>/single_version ->
              HOME/user_builds/<project>/rtd-builds/<default_version>/
    """
    default_version = version.project.get_default_version()
    msg = "Symlinking single_version"
    log.debug(LOG_TEMPLATE.format(project=version.project.slug,
                                  version=default_version, msg=msg))

    # Ensure the symlink's parent directory exists.
    symlink = version.project.single_version_symlink_path()
    parent_dir = '/'.join(symlink.split('/')[:-1])
    run_on_app_servers('mkdir -p %s' % parent_dir)

    # Point the symlink at the default version's built docs.
    docs_dir = os.path.join(settings.DOCROOT, version.project.slug,
                            'rtd-builds', default_version)
    run_on_app_servers('ln -nsf %s %s' % (docs_dir, symlink))
def handle_exception(self, exc_type, exc_value, _):
    """
    Exception handling for __enter__ and __exit__.

    This reports on the exception we're handling and special cases
    subclasses of BuildEnvironmentException. For
    :py:class:`BuildEnvironmentWarning`, exit this context gracefully, but
    don't mark the build as a failure. For all other exception classes,
    including :py:class:`BuildEnvironmentError`, the build will be marked as
    a failure and the context will be gracefully exited.

    If the exception's type is :py:class:`BuildEnvironmentWarning` or it's
    an exception marked as ``WARNING_EXCEPTIONS`` we log the problem as a
    WARNING, otherwise we log it as an ERROR.
    """
    if exc_type is not None:
        log_level_function = None
        if issubclass(exc_type, BuildEnvironmentWarning):
            # Warnings are logged but don't mark the build as failed.
            log_level_function = log.warning
        elif exc_type in self.WARNING_EXCEPTIONS:
            # Known-noisy exceptions: logged as warnings, but still failures.
            log_level_function = log.warning
            self.failure = exc_value
        else:
            log_level_function = log.error
            self.failure = exc_value

        log_level_function(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg=exc_value,
            ),
            exc_info=True,
            extra={
                'stack': True,
                'tags': {
                    'build': self.build.get('id'),
                    'project': self.project.slug,
                    'version': self.version.slug,
                },
            },
        )
    # Always return True so the context-manager protocol suppresses the
    # exception.
    return True
def get_client(self):
    """Create Docker client connection."""
    try:
        if self.client is None:
            # timeout=None: presumably disables the HTTP timeout so long
            # builds aren't cut off by the client — TODO confirm against
            # docker-py docs.
            self.client = Client(base_url=self.docker_socket,
                                 version=DOCKER_VERSION,
                                 timeout=None)
        return self.client
    except DockerException as e:
        log.exception(
            LOG_TEMPLATE.format(
                project=self.project.slug,
                version=self.version.slug,
                msg='Could not connect to Docker API',
            ),
        )
        # We don't raise an error here mentioning Docker, that is a
        # technical detail that the user can't resolve on their own.
        # Instead, give the user a generic failure
        raise BuildEnvironmentError(
            BuildEnvironmentError.GENERIC_WITH_BUILD_ID.format(
                build_id=self.build['id'],
            ))
def __enter__(self):
    """Start of environment context

    Removes any stale (stopped) container with our name, creates the
    checkout path, then creates and starts the build container.
    """
    log.info('Creating container')
    try:
        # Test for existing container. We remove any stale containers that
        # are no longer running here if there is a collision. If the
        # container is still running, this would be a failure of the version
        # locking code, so we throw an exception.
        state = self.container_state()
        if state is not None:
            if state.get('Running') is True:
                exc = BuildEnvironmentError(
                    _('A build environment is currently '
                      'running for this version'))
                self.failure = exc
                self.build['state'] = BUILD_STATE_FINISHED
                raise exc
            else:
                # `log.warn` is a deprecated alias of `log.warning`.
                log.warning(
                    LOG_TEMPLATE.format(
                        project=self.project.slug,
                        version=self.version.slug,
                        msg=("Removing stale container {0}".format(
                            self.container_id))))
                client = self.get_client()
                client.remove_container(self.container_id)
    except DockerAPIError:
        # Sanity-check failures are non-fatal; container creation below
        # will surface any real problem.
        pass

    # Create the checkout path if it doesn't exist to avoid Docker creation
    if not os.path.exists(self.project.doc_path):
        os.makedirs(self.project.doc_path)

    try:
        self.create_container()
    except:  # pylint: disable=broad-except
        # Ensure teardown/reporting runs before re-raising.
        self.__exit__(*sys.exc_info())
        raise
    return self
def symlink_translations(project):
    """Symlink project translations

    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = {}

    if getattr(settings, 'DONT_HIT_DB', True):
        # Web tier avoids direct DB access; fetch translations via the API.
        for trans in (api
                      .project(project.pk)
                      .translations.get()['translations']):
            translations[trans['language']] = trans['slug']
    else:
        for trans in project.translations.all():
            translations[trans.language] = trans.slug

    # Default language, and pointer for 'en'
    version_slug = project.slug.replace('_', '-')
    translations[project.language] = version_slug
    if 'en' not in translations:
        translations['en'] = version_slug

    # Make sure the translations directory exists before symlinking into it.
    run_on_app_servers(
        'mkdir -p {0}'
        .format(os.path.join(project.doc_path, 'translations')))

    for (language, slug) in translations.items():
        log.debug(LOG_TEMPLATE.format(
            project=project.slug,
            version=project.get_default_version(),
            msg="Symlinking translation: %s->%s" % (language, slug)
        ))

        # The directory for this specific translation
        symlink = project.translations_symlink_path(language)
        translation_path = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf {0} {1}'.format(translation_path, symlink))
def symlink_translations(version):
    """Symlink project translations

    Link from HOME/user_builds/project/translations/<lang> ->
              HOME/user_builds/<project>/rtd-builds/
    """
    translations = {}

    if getattr(settings, 'DONT_HIT_DB', True):
        # Web tier avoids direct DB access; fetch translations via the API.
        for trans in (api
                      .project(version.project.pk)
                      .translations.get()['translations']):
            translations[trans['language']] = trans['slug']
    else:
        for trans in version.project.translations.all():
            translations[trans.language] = trans.slug

    # Default language, and pointer for 'en'
    version_slug = version.project.slug.replace('_', '-')
    translations[version.project.language] = version_slug
    if 'en' not in translations:
        translations['en'] = version_slug

    # Make sure the translations directory exists before symlinking into it.
    run_on_app_servers(
        'mkdir -p {0}'
        .format(os.path.join(version.project.doc_path, 'translations')))

    for (language, slug) in translations.items():
        log.debug(LOG_TEMPLATE.format(
            project=version.project.slug,
            version=version.slug,
            msg="Symlinking translation: %s->%s" % (language, slug)
        ))

        # The directory for this specific translation
        symlink = version.project.translations_symlink_path(language)
        translation_path = os.path.join(settings.DOCROOT, slug, 'rtd-builds')
        run_on_app_servers('ln -nsf {0} {1}'.format(translation_path, symlink))
def run_command_class(self, cls, cmd, **kwargs):
    """
    Run command from this environment.

    Use ``cls`` to instantiate a command

    :param warn_only: Don't raise an exception on command failure
    """
    warn_only = kwargs.pop('warn_only', False)
    # Remove PATH from env, and set it to bin_path if it isn't passed in
    environment_path = self.environment.pop('BIN_PATH', None)
    if environment_path and 'bin_path' not in kwargs:
        kwargs['bin_path'] = environment_path
    assert 'environment' not in kwargs, "environment can't be passed in via commands."
    kwargs['environment'] = self.environment
    kwargs['build_env'] = self

    command = cls(cmd, **kwargs)
    self.commands.append(command)
    command.run()

    # Save to database
    if self.record:
        command.save()

    if command.failed:
        failure_msg = u'Command {cmd} failed'.format(cmd=command.get_command())
        if command.output:
            failure_msg += u':\n{out}'.format(out=command.output)
        # NOTE(review): a warning-class exception is raised on the
        # non-warn path here, mirroring the original behavior.
        if not warn_only:
            raise BuildEnvironmentWarning(failure_msg)
        log.warn(
            LOG_TEMPLATE.format(project=self.project.slug,
                                version=self.version.slug,
                                msg=failure_msg))
    return command
def setup_base(self):
    """Set up a fresh conda environment for this version.

    Any pre-existing environment directory for the version is removed
    first so every build starts from a clean state.
    """
    env_root = os.path.join(self.project.doc_path, 'conda')
    env_path = os.path.join(env_root, self.version.slug)

    if os.path.exists(env_path):
        # Re-create conda directory each time to keep fresh state
        log.info(LOG_TEMPLATE.format(
            project=self.project.slug,
            version=self.version.slug,
            msg='Removing existing conda directory',
        ))
        shutil.rmtree(env_path)

    self.build_env.run(
        'conda',
        'env',
        'create',
        '--name',
        self.version.slug,
        '--file',
        self.config.conda.environment,
        bin_path=None,  # Don't use conda bin that doesn't exist yet
        cwd=self.checkout_path,
    )
def webhook_notification(version, build, hook_url):
    """Send webhook notification for project webhook

    :param version: Version instance to send hook for
    :param build: Build instance that failed
    :param hook_url: Hook URL to send to
    """
    project = version.project

    # Payload mirrors the project identity plus a summary of the build.
    payload = {
        'name': project.name,
        'slug': project.slug,
        'build': {
            'id': build.id,
            'success': build.success,
            'date': build.date.strftime('%Y-%m-%d %H:%M:%S'),
        }
    }

    log.debug(
        LOG_TEMPLATE.format(project=project.slug, version='',
                            msg='sending notification to: %s' % hook_url))
    requests.post(hook_url, data=json.dumps(payload))
def record_pdf(record, results, state, version):
    """Post the outcome of the PDF build step to the v1 API.

    Does nothing unless recording is enabled, the project is
    Sphinx-based, and PDF builds are enabled for it.  Any error while
    posting is logged and swallowed (best-effort reporting).
    """
    if not record or 'sphinx' not in version.project.documentation_type:
        return None
    if not version.project.enable_pdf_build:
        return None
    try:
        if 'pdf' in results:
            exit_code, cmd_output, cmd_error = results['pdf'][:3]
            succeeded = (exit_code == 0)
        else:
            exit_code = 999
            succeeded = False
            cmd_output = cmd_error = "PDF Failed"

        cmd_output = cmd_output.decode('utf-8', 'ignore')
        cmd_error = cmd_error.decode('utf-8', 'ignore')

        # Treat the 'Output written on' marker in the output as success,
        # regardless of the exit code.
        if 'Output written on' in cmd_output:
            succeeded = True

        api_v1.build.post(dict(
            state=state,
            project='/api/v1/project/%s/' % version.project.pk,
            version='/api/v1/version/%s/' % version.pk,
            success=succeeded,
            type='pdf',
            output=cmd_output,
            error=cmd_error,
            exit_code=exit_code,
            builder=socket.gethostname(),
        ))
    except Exception:
        log.error(LOG_TEMPLATE.format(project=version.project.slug,
                                      version=version.slug,
                                      msg="Unable to post a new build"),
                  exc_info=True)
def setup_environment(version):
    """
    Build the virtualenv and install the project into it.

    Always build projects with a virtualenv.

    :param version: Version instance whose project gets a fresh virtualenv
    :returns: dict of step-name -> ``run()`` result tuples
              ('venv', 'doc_builder', optionally 'requirements', 'install')
    """
    ret_dict = {}
    project = version.project
    build_dir = os.path.join(project.venv_path(version=version.slug), 'build')
    if os.path.exists(build_dir):
        log.info(LOG_TEMPLATE.format(project=project.slug,
                                     version=version.slug,
                                     msg='Removing existing build dir'))
        shutil.rmtree(build_dir)
    if project.use_system_packages:
        site_packages = '--system-site-packages'
    else:
        site_packages = '--no-site-packages'
    # TODO: why is this virtualenv-2.7?
    # Here the command has been modified to support different
    # interpreters.
    ret_dict['venv'] = run(
        '{cmd} {site_packages} {path}'.format(
            cmd='virtualenv-2.7 -p {interpreter}'.format(
                interpreter=project.python_interpreter),
            site_packages=site_packages,
            path=project.venv_path(version=version.slug)
        )
    )
    # Other code expects sphinx-build to be installed inside the
    # virtualenv.  Using the -I option makes sure it gets installed
    # even if it is already installed system-wide (and
    # --system-site-packages is used)
    if project.use_system_packages:
        ignore_option = '-I'
    else:
        ignore_option = ''
    wheeldir = os.path.join(settings.SITE_ROOT, 'deploy', 'wheels')
    ret_dict['doc_builder'] = run(
        (
            '{cmd} install --use-wheel --find-links={wheeldir} -U {ignore_option} '
            'sphinx==1.2.2 virtualenv==1.10.1 setuptools==1.1 docutils==0.11 readthedocs-sphinx-ext==0.4.4 mkdocs==0.11.1 mock==1.0.1 pillow==2.6.1'
        ).format(
            cmd=project.venv_bin(version=version.slug, bin='pip'),
            ignore_option=ignore_option,
            wheeldir=wheeldir,
        )
    )

    # Handle requirements
    requirements_file_path = project.requirements_file
    checkout_path = project.checkout_path(version.slug)
    if not requirements_file_path:
        # Search the docs dir first, then the repo root, and take the
        # FIRST requirements file found.
        docs_dir = builder_loading.get(project.documentation_type)(version).docs_dir()
        for path in [docs_dir, '']:
            for req_file in ['pip_requirements.txt', 'requirements.txt']:
                test_path = os.path.join(checkout_path, path, req_file)
                print('Testing %s' % test_path)
                if os.path.exists(test_path):
                    requirements_file_path = test_path
                    break
            # BUG FIX: the original only broke out of the inner loop, so a
            # requirements file in the repo root silently overrode one
            # found in the docs dir. Stop scanning once a file is found.
            if requirements_file_path:
                break
    if requirements_file_path:
        os.chdir(checkout_path)
        ret_dict['requirements'] = run(
            '{cmd} install --exists-action=w -r {requirements}'.format(
                cmd=project.venv_bin(version=version.slug, bin='pip'),
                requirements=requirements_file_path))

    # Handle setup.py
    os.chdir(project.checkout_path(version.slug))
    if os.path.isfile("setup.py"):
        if getattr(settings, 'USE_PIP_INSTALL', False):
            ret_dict['install'] = run(
                '{cmd} install --ignore-installed .'.format(
                    cmd=project.venv_bin(version=version.slug, bin='pip')))
        else:
            ret_dict['install'] = run(
                '{cmd} setup.py install --force'.format(
                    cmd=project.venv_bin(version=version.slug, bin='python')))
    else:
        ret_dict['install'] = (999, "", "No setup.py, skipping install")
    return ret_dict
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository

    :param version_pk: Version id to update
    :returns: dict with a 'checkout' entry holding the VCS result
    """
    # Fetch the version over the v1 API and wrap it in an API-backed
    # model object (build hosts go through the API, not the ORM).
    version_data = api_v1.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(("Repo type '{0}' unknown".format(project.repo_type)))

    # Hold the non-blocking repo lock while touching the working copy so
    # concurrent builds of the same repo don't interleave VCS operations.
    with project.repo_nonblockinglock(
            version=version,
            max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        before_vcs.send(sender=version)
        # Get the actual code on disk
        if version:
            log.info(
                LOG_TEMPLATE.format(
                    project=project.slug,
                    version=version.slug,
                    msg='Checking out version {slug}: {identifier}'.format(
                        slug=version.slug,
                        identifier=version.identifier
                    )
                )
            )
            version_slug = version.slug
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.checkout(
                version.identifier,
            )
        else:
            # Does this ever get called?
            log.info(LOG_TEMPLATE.format(
                project=project.slug, version=version.slug,
                msg='Updating to latest revision'))
            version_slug = LATEST
            version_repo = project.vcs_repo(version_slug)
            ret_dict['checkout'] = version_repo.update()
        after_vcs.send(sender=version)

    # Update tags/version
    # Push the tags/branches seen in the repo back to the v2 API so the
    # dashboard's version list stays in sync with the VCS.
    version_post_data = {'repo': version_repo.repo_url}
    if version_repo.supports_tags:
        version_post_data['tags'] = [
            {'identifier': v.identifier,
             'verbose_name': v.verbose_name,
             } for v in version_repo.tags
        ]
    if version_repo.supports_branches:
        version_post_data['branches'] = [
            {'identifier': v.identifier,
             'verbose_name': v.verbose_name,
             } for v in version_repo.branches
        ]
    try:
        api_v2.project(project.pk).sync_versions.post(version_post_data)
    # NOTE(review): broad catch-and-print; sync failures never surface
    # beyond stdout — confirm this best-effort behavior is intended.
    except Exception, e:
        print "Sync Versions Exception: %s" % e.message
build_results = build_docs(version, force, search, localmedia) results.update(build_results) except vcs_support_utils.LockTimeout, e: results['checkout'] = (423, "", "Version locked, retrying in 5 minutes.") log.info(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Unable to lock, will retry")) # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying # Should completely retry the task for us until max_retries is exceeded update_docs.retry(exc=e, throw=False) except ProjectImportError, e: results['checkout'] = (404, "", 'Failed to import project; skipping build.\n\nError\n-----\n\n%s' % e.message) # Close out build in finally with error. pass except Exception, e: log.error(LOG_TEMPLATE.format(project=version.project.slug, version=version.slug, msg="Top-level Build Failure"), exc_info=True) results['checkout'] = (404, "", 'Top-level Build Failure: %s' % e.message) finally: record_build(build=build, record=record, results=results, state='finished', start_time=start_time) record_pdf(record=record, results=results, state='finished', version=version) log.info(LOG_TEMPLATE.format(project=version.project.slug, version='', msg='Build finished')) build_id = build.get('id') # Web Server Tasks if build_id: finish_build.delay( version_pk=version.pk, build_pk=build_id, hostname=socket.gethostname(), html=results.get('html', [404])[0] == 0, localmedia=results.get('localmedia', [404])[0] == 0,
def _log(self, msg):
    """Log ``msg`` at INFO level, tagged with this project and version."""
    context = LOG_TEMPLATE.format(
        project=self.project.slug,
        version=self.version.slug,
        msg=msg,
    )
    log.info(context)
def update_docs(pk, version_pk=None, build_pk=None, record=True, docker=False,
                search=True, force=False, intersphinx=True, localmedia=True,
                basic=False, **kwargs):
    """
    The main entry point for updating documentation.

    It handles all of the logic around whether a project is imported or we
    created it.  Then it will build the html docs and other requested parts.

    `pk`
        Primary key of the project to update

    `record`
        Whether or not to keep a record of the update in the database. Useful
        for preventing changes visible to the end-user when running commands
        from the shell, for example.
    """
    start_time = datetime.datetime.utcnow()
    try:
        project_data = api_v1.project(pk).get()
    except HttpClientError:
        # NOTE(review): this logs but does not return/re-raise, so the
        # next line would hit an undefined project_data — confirm
        # intended behavior on API failure.
        log.exception(LOG_TEMPLATE.format(project=pk, version='', msg='Failed to get project data on build. Erroring.'))
    project = make_api_project(project_data)

    # Don't build skipped projects
    if project.skip:
        log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Skipping'))
        return
    else:
        log.info(LOG_TEMPLATE.format(project=project.slug, version='', msg='Building'))
    version = ensure_version(project, version_pk)
    build = create_build(build_pk)
    results = {}

    # Build server stuff
    try:
        # Record state transitions as we go so the dashboard can show
        # build progress: cloning -> installing -> building.
        record_build(build=build, record=record, results=results, state='cloning')
        vcs_results = setup_vcs(version, build)
        if vcs_results:
            results.update(vcs_results)

        if project.documentation_type == 'auto':
            update_documentation_type(version)

        if docker or settings.DOCKER_ENABLE:
            record_build(build=build, record=record, results=results, state='building')
            # Note: rebinds the boolean `docker` parameter to the
            # environment object for the rest of the function.
            docker = DockerEnvironment(version)
            build_results = docker.build()
            results.update(build_results)
        else:
            record_build(build=build, record=record, results=results, state='installing')
            setup_results = setup_environment(version)
            results.update(setup_results)

            record_build(build=build, record=record, results=results, state='building')
            build_results = build_docs(version, force, search, localmedia)
            results.update(build_results)
    except vcs_support_utils.LockTimeout, e:
        # Another build holds the repo lock; record a 423 (Locked)
        # pseudo-result and let celery retry the whole task later.
        results['checkout'] = (423, "", "Version locked, retrying in 5 minutes.")
        log.info(LOG_TEMPLATE.format(project=version.project.slug,
                                     version=version.slug, msg="Unable to lock, will retry"))
        # http://celery.readthedocs.org/en/3.0/userguide/tasks.html#retrying
        # Should completely retry the task for us until max_retries is exceeded
        update_docs.retry(exc=e, throw=False)
def build_docs(version, force, pdf, man, epub, dash, search, localmedia):
    """
    This handles the actual building of the documentation

    :param version: Version to build
    :param force: Force a full rebuild of the HTML builder
    :param pdf: Build the PDF artifact (Sphinx projects only)
    :param man: Unused here; kept for caller compatibility
    :param epub: Build the ePub artifact (Sphinx projects only)
    :param dash: Unused here; kept for caller compatibility
    :param search: Build the search (JSON) artifacts
    :param localmedia: Build the single-page local-media HTML archive
    :returns: dict of step-name -> builder result tuples
    """
    project = version.project
    results = {}

    before_build.send(sender=version)

    # Hold the repo lock for the whole build so concurrent builds of the
    # same repo don't step on each other's working copy.
    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(settings, 'REPO_LOCK_SECONDS', 30)):
        html_builder = builder_loading.get(project.documentation_type)(version)
        if force:
            html_builder.force()
        html_builder.append_conf()
        results['html'] = html_builder.build()
        if results['html'][0] == 0:
            html_builder.move()

            # Gracefully attempt to move files via task on web workers.
            try:
                move_files.delay(
                    version_pk=version.pk,
                    html=True,
                    hostname=socket.gethostname(),
                )
            except socket.error:
                # Best-effort: web workers may be unreachable.
                pass

        fake_results = (999, "Project Skipped, Didn't build",
                        "Project Skipped, Didn't build")
        if 'mkdocs' in project.documentation_type:
            if search:
                try:
                    search_builder = builder_loading.get('mkdocs_json')(version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        search_builder.move()
                # BUG FIX: was a bare `except:`, which also swallows
                # SystemExit/KeyboardInterrupt (e.g. worker shutdown).
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug, version=version.slug,
                        msg="JSON Build Error"), exc_info=True)

        if 'sphinx' in project.documentation_type:
            # Search builder. Creates JSON from docs and sends it to the
            # server.
            if search:
                try:
                    search_builder = builder_loading.get(
                        'sphinx_search')(version)
                    results['search'] = search_builder.build()
                    if results['search'][0] == 0:
                        # Copy json for safe keeping
                        search_builder.move()
                # BUG FIX: narrowed from a bare `except:` (see above).
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug, version=version.slug,
                        msg="JSON Build Error"), exc_info=True)

            # Local media builder for singlepage HTML download archive
            if localmedia:
                try:
                    localmedia_builder = builder_loading.get(
                        'sphinx_singlehtmllocalmedia')(version)
                    results['localmedia'] = localmedia_builder.build()
                    if results['localmedia'][0] == 0:
                        localmedia_builder.move()
                # BUG FIX: narrowed from a bare `except:` (see above).
                except Exception:
                    log.error(LOG_TEMPLATE.format(
                        project=project.slug, version=version.slug,
                        msg="Local Media HTML Build Error"), exc_info=True)

            # Optional build steps
            if version.project.slug not in HTML_ONLY and not project.skip:
                if pdf:
                    pdf_builder = builder_loading.get('sphinx_pdf')(version)
                    results['pdf'] = pdf_builder.build()
                    # Always move pdf results even when there's an error.
                    # if pdf_results[0] == 0:
                    pdf_builder.move()
                else:
                    results['pdf'] = fake_results
                if epub:
                    epub_builder = builder_loading.get('sphinx_epub')(version)
                    results['epub'] = epub_builder.build()
                    if results['epub'][0] == 0:
                        epub_builder.move()
                else:
                    results['epub'] = fake_results

    after_build.send(sender=version)

    return results
def _log(self, msg):
    """Write ``msg`` to the info log, prefixed with project and version."""
    log.info(LOG_TEMPLATE.format(project=self.project.slug,
                                 version=self.version.slug,
                                 msg=msg))
def _log_warning(self, msg):
    """Log ``msg`` as a warning, pinned to this project's 'latest' version."""
    warning_line = LOG_TEMPLATE.format(
        project=self.project.slug,
        version='latest',
        msg=msg,
    )
    log.warning(warning_line)