def up(self):
    """Throw away local changes and update the svn working copy.

    Runs ``svn revert --recursive`` followed by ``svn up``; raises
    ProjectImportError with the failing command's exit code otherwise.
    """
    revert_code = self.run('svn', 'revert', '--recursive', '.')[0]
    if revert_code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (svn revert): %s"
            % (self.repo_url, revert_code)
        )
    update_code = self.run('svn', 'up', '--accept', 'theirs-full',
                           '--trust-server-cert', '--non-interactive')[0]
    if update_code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (svn up): %s"
            % (self.repo_url, update_code)
        )
def pull(self):
    """Pull new changesets and force-update the hg working copy.

    Returns the (retcode, stdout, stderr) tuple of the ``hg update -C``
    run.  Raises ProjectImportError if either command exits non-zero.
    """
    pull_output = self.run('hg', 'pull')
    if pull_output[0] != 0:
        raise ProjectImportError(
            ("Failed to get code from '%s' (hg pull): %s" %
             (self.repo_url, pull_output[0])))
    # Bug fix: the original did `self.run(...)[0]` and then indexed the
    # resulting int again with `update_output[0]` (a TypeError on every
    # run), and reported the *pull* exit code in the update error message.
    update_output = self.run('hg', 'update', '-C')
    if update_output[0] != 0:
        raise ProjectImportError(
            ("Failed to get code from '%s' (hg update): %s" %
             (self.repo_url, update_output[0])))
    return update_output
def up(self):
    """Revert local changes, then update the bzr checkout.

    Returns the (retcode, stdout, stderr) tuple of the ``bzr up`` run.
    Raises ProjectImportError if either command exits non-zero.
    """
    retcode = self.run('bzr', 'revert')[0]
    if retcode != 0:
        raise ProjectImportError(
            ("Failed to get code from '%s' (bzr revert): %s" %
             (self.repo_url, retcode)))
    up_output = self.run('bzr', 'up')
    if up_output[0] != 0:
        # Bug fix: report the exit code of `bzr up` itself; the original
        # interpolated `retcode` from the (successful) revert step here.
        raise ProjectImportError(
            ("Failed to get code from '%s' (bzr up): %s" %
             (self.repo_url, up_output[0])))
    return up_output
def fetch(self):
    """Fetch remote refs including tags, pruning deleted remote branches.

    Raises ProjectImportError (with git's stderr attached) on failure.
    """
    exit_code, _, stderr = self.run('git', 'fetch', '--tags', '--prune')
    if exit_code == 0:
        return
    raise ProjectImportError(
        "Failed to get code from '%s' (git fetch): %s\n\nStderr:\n\n%s\n\n"
        % (self.repo_url, exit_code, stderr))
def append_conf(self, **__):
    """Modify given ``conf.py`` file from a whitelisted user's project.

    Appends the rendered ``doc_builder/conf.py.tmpl`` template to the
    project's conf.py, first generating an index document and a fresh
    config when the project has no conf.py path of its own.  Raises
    ProjectImportError when the conf file cannot be located or opened.
    """
    # No conf.py path yet: create an index document and write a config so
    # that project.conf_file() below can succeed.
    try:
        self.version.get_conf_py_path()
    except ProjectImportError:
        master_doc = self.create_index(extension='rst')
        self._write_config(master_doc=master_doc)
    try:
        outfile_path = self.project.conf_file(self.version.slug)
        outfile = codecs.open(outfile_path, encoding='utf-8', mode='a')
    except (ProjectImportError, IOError):
        # Python 2 three-expression raise: re-raise as ProjectImportError
        # while preserving the original traceback.
        trace = sys.exc_info()[2]
        raise ProjectImportError('Conf file not found'), None, trace
    # Append config to project conf file
    tmpl = template_loader.get_template('doc_builder/conf.py.tmpl')
    rendered = tmpl.render(self.get_config_params())
    with outfile:
        outfile.write("\n")
        outfile.write(rendered)
    # Print the contents of conf.py in order to make the rendered
    # configfile visible in the build logs
    self.run(
        'cat',
        os.path.relpath(outfile_path,
                        self.project.checkout_path(self.version.slug)),
        cwd=self.project.checkout_path(self.version.slug),
    )
def clone(self):
    """Create a fresh bzr checkout in a cleaned working directory."""
    self.make_clean_working_dir()
    exit_code = self.run('bzr', 'checkout', self.repo_url, '.')[0]
    if exit_code == 0:
        return
    raise ProjectImportError(
        ("Failed to get code from '%s' (bzr checkout): %s" %
         (self.repo_url, exit_code)))
def clone(self):
    """Clone the hg repository into the current directory.

    Returns the (retcode, stdout, stderr) tuple of the clone run.
    """
    clone_output = self.run('hg', 'clone', self.repo_url, '.')
    exit_code = clone_output[0]
    if exit_code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (hg clone): %s"
            % (self.repo_url, exit_code))
    return clone_output
def load_yaml_config(version):
    """
    Load a configuration from `readthedocs.yml` file.

    This uses the configuration logic from `readthedocs-build`,
    which will keep parsing consistent between projects.

    :param version: project version whose checkout holds the YAML file
    :returns: a ConfigWrapper around the parsed (or default) config
    :raises ProjectImportError: when the YAML file exists but is invalid
    """
    checkout_path = version.project.checkout_path(version.slug)
    try:
        config = load_config(
            path=checkout_path,
            env_config={
                'output_base': '',
                'type': 'sphinx',
                'name': version.slug,
            },
        )[0]
    except InvalidConfig as e:
        # This is a subclass of ConfigError, so has to come first
        raise ProjectImportError(e.message)
    except ConfigError:
        # No (or unreadable) readthedocs.yml: fall back to an empty
        # default config rather than failing the build.
        config = BuildConfig(
            env_config={},
            raw_config={},
            source_file='empty',
            source_position=0,
        )
    return ConfigWrapper(version=version, yaml_config=config)
def clone(self):
    """Quietly clone the git repository, submodules included, into cwd."""
    exit_code = self.run('git', 'clone', '--recursive', '--quiet',
                         self.repo_url, '.')[0]
    if exit_code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (git clone): %s"
            % (self.repo_url, exit_code))
def clone(self):
    """Clone the hg repository into a cleaned working directory.

    Returns the (retcode, stdout, stderr) tuple of the clone run.
    """
    self.make_clean_working_dir()
    result = self.run('hg', 'clone', self.repo_url, '.')
    if result[0] != 0:
        raise ProjectImportError(
            ("Failed to get code from '%s' (hg clone): %s" %
             (self.repo_url, result[0])))
    return result
def pull(self):
    """Fetch branches, then fetch tags, from the remote.

    Raises ProjectImportError when either fetch exits non-zero.  The
    original overwrote the first fetch's exit status before checking it,
    silently ignoring a failed branch fetch; both are now checked.
    """
    code, out, err = self.run('git', 'fetch')
    if code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (git fetch): %s" % (
                self.repo_url, code)
        )
    code, out, err = self.run('git', 'fetch', '-t')
    if code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (git fetch): %s" % (
                self.repo_url, code)
        )
def clone(self):
    """Clone the git repository, surfacing git's stderr on failure."""
    exit_code, _, stderr = self.run('git', 'clone', '--recursive',
                                    '--quiet', self.repo_url, '.')
    if exit_code == 0:
        return
    raise ProjectImportError(
        ("Failed to get code from '{url}' (git clone): {exit}\n\n"
         "git clone error output: {sterr}").format(url=self.repo_url,
                                                   exit=exit_code,
                                                   sterr=stderr))
def co(self, identifier=None):
    """Check out the repository with svn.

    When ``identifier`` is given it is appended to ``base_url`` to select
    a specific path; otherwise ``repo_url`` is checked out.
    """
    url = self.base_url + identifier if identifier else self.repo_url
    exit_code = self.run('svn', 'checkout', '--quiet', url, '.')[0]
    if exit_code != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (svn checkout): %s"
            % (url, exit_code)
        )
def co(self, identifier=None):
    """Fresh svn checkout into a cleaned working directory.

    Returns the (retcode, stdout, stderr) tuple of the checkout run.
    """
    self.make_clean_working_dir()
    url = self.base_url + identifier if identifier else self.repo_url
    result = self.run('svn', 'checkout', '--quiet', url, '.')
    if result[0] != 0:
        raise ProjectImportError(
            "Failed to get code from '%s' (svn checkout): %s"
            % (url, result[0]))
    return result
def conf_file(self, version='latest'):
    """Locate the Sphinx ``conf.py`` for ``version``.

    Resolution order: the path configured on the project model, then a
    shallow ``find``, then ``full_find``.  Raises ProjectImportError only
    when no candidate files were found at all.
    """
    # An explicit conf.py path configured on the project model wins.
    if self.conf_py_file:
        log.debug('Inserting conf.py file path from model')
        return os.path.join(self.checkout_path(version), self.conf_py_file)
    files = self.find('conf.py', version)
    if not files:
        files = self.full_find('conf.py', version)
    if len(files) == 1:
        return files[0]
    elif len(files) > 1:
        for file in files:
            # NOTE(review): `find('doc', 70)` only matches 'doc' at or
            # after index 70 — presumably to skip the checkout-root
            # prefix of the absolute path; confirm against callers.
            # NOTE(review): when several files exist and none matches,
            # this falls through and implicitly returns None.
            if file.find('doc', 70) != -1:
                return file
    else:
        raise ProjectImportError(_("Conf File Missing."))
def conf_file(self, version=LATEST):
    """Locate the Sphinx ``conf.py`` for ``version``.

    Resolution order: the path configured on the project model (if it
    exists on disk), a shallow ``find``, then ``full_find``.  Raises
    ProjectImportError when no usable conf.py can be located.
    """
    if self.conf_py_file:
        conf_path = os.path.join(self.checkout_path(version),
                                 self.conf_py_file)
        if os.path.exists(conf_path):
            log.info('Inserting conf.py file path from model')
            return conf_path
        log.warning("Conf file specified on model doesn't exist")
    candidates = self.find('conf.py', version) or self.full_find('conf.py', version)
    if len(candidates) == 1:
        return candidates[0]
    for candidate in candidates:
        if candidate.find('doc', 70) != -1:
            return candidate
    # Having this be translatable causes this odd error:
    # ProjectImportError(<django.utils.functional.__proxy__ object at
    # 0x1090cded0>,)
    raise ProjectImportError(
        u"Conf File Missing. Please make sure you have a conf.py in your project.")
def append_conf(self, **kwargs):
    """Modify the given ``conf.py`` file from a whitelisted user's project.

    Appends the rendered ``doc_builder/conf.py.tmpl`` to the project's
    conf.py, supplying GitHub/Bitbucket edit links, version lists and
    download metadata in the template context.  Raises ProjectImportError
    when the conf file cannot be opened.
    """
    # Pull config data
    try:
        conf_py_path = self.version.get_conf_py_path()
    except ProjectImportError:
        # No conf.py yet: write a default config and index document.
        self._write_config()
        self.create_index(extension='rst')

    project = self.version.project
    # Open file for appending.
    try:
        outfile = codecs.open(project.conf_file(self.version.slug),
                              encoding='utf-8', mode='a')
    except IOError:
        # Python 2 three-expression raise: preserve original traceback.
        trace = sys.exc_info()[2]
        raise ProjectImportError('Conf file not found'), None, trace
    outfile.write("\n")
    conf_py_path = self.version.get_conf_py_path()
    remote_version = self.version.get_vcs_slug()

    github_user, github_repo = version_utils.get_github_username_repo(
        url=self.version.project.repo)
    github_version_is_editable = (self.version.type == 'branch')
    display_github = github_user is not None

    bitbucket_user, bitbucket_repo = version_utils.get_bitbucket_username_repo(
        url=self.version.project.repo)
    bitbucket_version_is_editable = (self.version.type == 'branch')
    display_bitbucket = bitbucket_user is not None

    rtd_ctx = Context({
        'current_version': self.version.verbose_name,
        'project': project,
        'settings': settings,
        'static_path': STATIC_DIR,
        'template_path': TEMPLATE_DIR,
        'conf_py_path': conf_py_path,
        'api_host': getattr(settings, 'SLUMBER_API_HOST',
                            'https://readthedocs.org'),
        # GitHub
        'github_user': github_user,
        'github_repo': github_repo,
        'github_version': remote_version,
        'github_version_is_editable': github_version_is_editable,
        'display_github': display_github,
        # BitBucket
        'bitbucket_user': bitbucket_user,
        'bitbucket_repo': bitbucket_repo,
        'bitbucket_version': remote_version,
        'bitbucket_version_is_editable': bitbucket_version_is_editable,
        'display_bitbucket': display_bitbucket,
        'commit': self.version.project.vcs_repo(self.version.slug).commit,
    })

    # Avoid hitting database and API if using Docker build environment
    if getattr(settings, 'DONT_HIT_API', False):
        rtd_ctx['versions'] = project.active_versions()
        rtd_ctx['downloads'] = self.version.get_downloads(pretty=True)
    else:
        rtd_ctx['versions'] = project.api_versions()
        rtd_ctx['downloads'] = (api.version(self.version.pk)
                                .get()['downloads'])
    rtd_string = template_loader.get_template('doc_builder/conf.py.tmpl').render(rtd_ctx)
    # NOTE(review): outfile is never closed on this path — presumably
    # relying on interpreter cleanup; confirm whether a close is needed.
    outfile.write(rtd_string)
def append_conf(self, **kwargs):
    """Modify the given ``conf.py`` file from a whitelisted user's project.

    Appends the rendered ``doc_builder/conf.py.tmpl`` to the project's
    conf.py (creating a default config + index first when none exists),
    then cats the file so the final config appears in the build logs.
    Raises ProjectImportError when the conf file cannot be opened.
    """
    # Pull config data
    try:
        conf_py_path = self.version.get_conf_py_path()
    except ProjectImportError:
        master_doc = self.create_index(extension='rst')
        self._write_config(master_doc=master_doc)

    project = self.project
    # Open file for appending.
    outfile_path = project.conf_file(self.version.slug)
    try:
        outfile = codecs.open(outfile_path, encoding='utf-8', mode='a')
    except IOError:
        # Python 2 three-expression raise: preserve original traceback.
        trace = sys.exc_info()[2]
        raise ProjectImportError('Conf file not found'), None, trace
    try:
        outfile.write("\n")
        # TODO this should be handled better in the theme
        conf_py_path = os.path.join(os.path.sep,
                                    self.version.get_conf_py_path(),
                                    '')
        remote_version = self.version.commit_name

        github_user, github_repo = version_utils.get_github_username_repo(
            url=self.project.repo)
        github_version_is_editable = (self.version.type == 'branch')
        display_github = github_user is not None

        bitbucket_user, bitbucket_repo = version_utils.get_bitbucket_username_repo(
            url=self.project.repo)
        bitbucket_version_is_editable = (self.version.type == 'branch')
        display_bitbucket = bitbucket_user is not None

        rtd_ctx = {
            'current_version': self.version.verbose_name,
            'project': project,
            'settings': settings,
            'static_path': SPHINX_STATIC_DIR,
            'template_path': SPHINX_TEMPLATE_DIR,
            'conf_py_path': conf_py_path,
            'api_host': getattr(settings, 'PUBLIC_API_URL',
                                'https://readthedocs.org'),
            # GitHub
            'github_user': github_user,
            'github_repo': github_repo,
            'github_version': remote_version,
            'github_version_is_editable': github_version_is_editable,
            'display_github': display_github,
            # BitBucket
            'bitbucket_user': bitbucket_user,
            'bitbucket_repo': bitbucket_repo,
            'bitbucket_version': remote_version,
            'bitbucket_version_is_editable': bitbucket_version_is_editable,
            'display_bitbucket': display_bitbucket,
            'commit': self.project.vcs_repo(self.version.slug).commit,
        }

        # Avoid hitting database and API if using Docker build environment
        if getattr(settings, 'DONT_HIT_API', False):
            rtd_ctx['versions'] = project.active_versions()
            rtd_ctx['downloads'] = self.version.get_downloads(pretty=True)
        else:
            rtd_ctx['versions'] = project.api_versions()
            rtd_ctx['downloads'] = (api.version(
                self.version.pk).get()['downloads'])
        rtd_string = template_loader.get_template(
            'doc_builder/conf.py.tmpl').render(rtd_ctx)
        outfile.write(rtd_string)
    finally:
        # Always release the file handle, even if rendering fails.
        outfile.close()

    # Print the contents of conf.py in order to make the rendered
    # configfile visible in the build logs
    self.run(
        'cat',
        os.path.basename(outfile_path),
        cwd=os.path.dirname(outfile_path),
    )
def update_imported_docs(version_pk): """ Check out or update the given project's repository. """ version_data = api.version(version_pk).get() version = make_api_version(version_data) project = version.project # Make Dirs if not os.path.exists(project.doc_path): os.makedirs(project.doc_path) with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)): update_docs_output = {} if not project.vcs_repo(): raise ProjectImportError("Repo type '{repo_type}' unknown".format( repo_type=project.repo_type)) # Get the actual code on disk if version: log.info('Checking out version {slug}: {identifier}'.format( slug=version.slug, identifier=version.identifier)) version_slug = version.slug version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.checkout( version.identifier) else: # Does this ever get called? log.info('Updating to latest revision') version_slug = 'latest' version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.update() # Ensure we have a conf file (an exception is raised if not) project.conf_file(version.slug) # Do Virtualenv bits: if project.use_virtualenv: if project.use_system_packages: site_packages = '--system-site-packages' else: site_packages = '--no-site-packages' update_docs_output['venv'] = run( '{cmd} --distribute {site_packages} {path}'.format( cmd='virtualenv', site_packages=site_packages, path=project.venv_path(version=version_slug))) # Other code expects sphinx-build to be installed inside the virtualenv. 
# Using the -I option makes sure it gets installed even if it is # already installed system-wide (and --system-site-packages is used) if project.use_system_packages: ignore_option = '-I' else: ignore_option = '' update_docs_output['sphinx'] = run( '{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1' .format(cmd=project.venv_bin(version=version_slug, bin='pip'), ignore_option=ignore_option)) if project.requirements_file: os.chdir(project.checkout_path(version_slug)) update_docs_output['requirements'] = run( '{cmd} install -r {requirements}'.format( cmd=project.venv_bin(version=version_slug, bin='pip'), requirements=project.requirements_file)) os.chdir(project.checkout_path(version_slug)) update_docs_output['install'] = run( '{cmd} setup.py install --force'.format( cmd=project.venv_bin(version=version_slug, bin='python'))) # check tags/version #XXX:dc: what in this block raises the values error? try: old_versions = [ obj['identifier'] for obj in api.version.get(project__slug=project.slug, limit=5000)['objects'] ] if version_repo.supports_tags: transaction.enter_transaction_management(True) tags = version_repo.tags for tag in tags: if tag.identifier in old_versions: continue log.debug('NEW TAG: (%s not in %s)' % (tag.identifier, old_versions)) slug = slugify_uniquely(Version, tag.verbose_name, 'slug', 255, project=project) try: version_data = api.version.post( dict(project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=tag.identifier, verbose_name=tag.verbose_name)) ver = make_api_version(version_data) log.info("New tag found: {0}".format(tag.identifier)) ver, highest = project.highest_version[1] ver_obj = mkversion(ver) #TODO: Handle updating higher versions automatically. #This never worked very well, anyways. 
if highest and ver_obj and ver_obj > highest: log.info("Highest version known, building docs") update_docs.delay(ver.project.pk, version_pk=ver.pk) except Exception, e: log.error("Failed to create version (tag)", exc_info=True) transaction.rollback() transaction.leave_transaction_management() if version_repo.supports_branches: transaction.enter_transaction_management(True) branches = version_repo.branches for branch in branches: if branch.identifier in old_versions: continue log.debug('NEW BRANCH: (%s not in %s)' % (branch, old_versions)) slug = slugify_uniquely(Version, branch.verbose_name, 'slug', 255, project=project) try: api.version.post( dict(project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=branch.identifier, verbose_name=branch.verbose_name)) log.info("New branch found: {0}".format( branch.identifier)) except Exception, e: log.error("Failed to create version (branch)", exc_info=True) transaction.rollback() transaction.leave_transaction_management()
def clone(self):
    """Check out the bzr repository into the current directory."""
    exit_code = self.run('bzr', 'checkout', self.repo_url, '.')[0]
    if exit_code == 0:
        return
    raise ProjectImportError(
        "Failed to get code from '%s' (bzr checkout): %s"
        % (self.repo_url, exit_code)
    )
def update_imported_docs(version_pk):
    """
    Check out or update the given project's repository

    Checks out (or updates) the version's code under a non-blocking repo
    lock, firing the before/after VCS signals, then posts the repo's tags
    and branches to the v2 API so the version list stays in sync.

    :param version_pk: Version id to update
    :returns: dict with the 'checkout' result of the VCS operation
    :raises ProjectImportError: when the project's repo type is unknown
    """
    version_data = api_v1.version(version_pk).get()
    version = make_api_version(version_data)
    project = version.project
    ret_dict = {}

    # Make Dirs
    if not os.path.exists(project.doc_path):
        os.makedirs(project.doc_path)

    if not project.vcs_repo():
        raise ProjectImportError(
            ("Repo type '{0}' unknown".format(project.repo_type)))

    with project.repo_nonblockinglock(version=version,
                                      max_lock_age=getattr(
                                          settings, 'REPO_LOCK_SECONDS', 30)):
        # Get the actual code on disk
        try:
            before_vcs.send(sender=version)
            if version:
                log.info(
                    LOG_TEMPLATE.format(
                        project=project.slug,
                        version=version.slug,
                        msg='Checking out version {slug}: {identifier}'.format(
                            slug=version.slug,
                            identifier=version.identifier)))
                version_slug = version.slug
                version_repo = project.vcs_repo(version_slug)
                ret_dict['checkout'] = version_repo.checkout(
                    version.identifier)
            else:
                # Does this ever get called?
                log.info(
                    LOG_TEMPLATE.format(project=project.slug,
                                        version=version.slug,
                                        msg='Updating to latest revision'))
                version_slug = LATEST
                version_repo = project.vcs_repo(version_slug)
                ret_dict['checkout'] = version_repo.update()
        except Exception:
            raise
        finally:
            # Fire the after-VCS signal whether or not checkout succeeded.
            after_vcs.send(sender=version)

        # Update tags/version
        version_post_data = {'repo': version_repo.repo_url}

        if version_repo.supports_tags:
            version_post_data['tags'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.tags]

        if version_repo.supports_branches:
            version_post_data['branches'] = [{
                'identifier': v.identifier,
                'verbose_name': v.verbose_name,
            } for v in version_repo.branches]

        try:
            api_v2.project(project.pk).sync_versions.post(version_post_data)
        except Exception as e:
            # Best-effort sync: a failure here must not fail the checkout.
            # (Python 2 print statement.)
            print "Sync Versions Exception: %s" % e.message
    return ret_dict