def symlink_cnames(self, domain=None):
    """
    Symlink project CNAME domains.

    Link from HOME/$CNAME_ROOT/<cname> -> HOME/$WEB_ROOT/<project>
    Also give cname -> project link
    Link from HOME/public_cname_project/<cname> -> HOME/<project>/
    """
    # An explicit domain restricts the relink to that one entry;
    # otherwise every domain registered for this project is processed.
    domains = [domain] if domain else Domain.objects.filter(project=self.project)
    for cname in domains:
        log.info(constants.LOG_TEMPLATE.format(
            project=self.project.slug,
            version='',
            msg='Symlinking CNAME: {0} -> {1}'.format(cname.domain,
                                                      self.project.slug)))
        # Point the CNAME at the project's documentation root.
        run(['ln', '-nsf', self.project_root,
             os.path.join(self.CNAME_ROOT, cname.domain)])
        # Companion link: CNAME back to the project checkout itself.
        run(['ln', '-nsf', self.project.doc_path,
             os.path.join(self.PROJECT_CNAME_ROOT, cname.domain)])
def symlink_versions(self):
    """Symlink project's versions

    Link from $WEB_ROOT/<project>/<language>/<version>/ ->
    HOME/user_builds/<project>/rtd-builds/<version>
    """
    versions = set()
    version_dir = os.path.join(self.WEB_ROOT, self.project.slug,
                               self.project.language)
    # Include active public versions, as well as public versions that
    # are built but not active, for archived versions
    version_queryset = self.get_version_queryset()
    if version_queryset.count():
        if not os.path.exists(version_dir):
            os.makedirs(version_dir)
        for version in version_queryset:
            self._log(u"Symlinking Version: %s" % version)
            symlink = os.path.join(version_dir, version.slug)
            docs_dir = os.path.join(settings.DOCROOT, self.project.slug,
                                    'rtd-builds', version.slug)
            # Pass argv as a list (as the other symlink helpers do) so
            # paths are not re-split on whitespace by the shell.
            run(['ln', '-nsf', docs_dir, symlink])
            versions.add(version.slug)

    # Remove old symlinks
    if os.path.exists(version_dir):
        for old_ver in os.listdir(version_dir):
            if old_ver not in versions:
                os.unlink(os.path.join(version_dir, old_ver))
def symlink_cnames(self, domain=None):
    """Symlink project CNAME domains

    Link from HOME/$CNAME_ROOT/<cname> -> HOME/$WEB_ROOT/<project>
    Also give cname -> project link
    Link from HOME/public_cname_project/<cname> -> HOME/<project>/
    """
    if domain:
        domains = [domain]
    else:
        domains = Domain.objects.filter(project=self.project)
    for cname in domains:
        self._log(u"Symlinking CNAME: {0} -> {1}".format(cname.domain,
                                                         self.project.slug))
        # Documentation root served under this CNAME.
        doc_root_link = os.path.join(self.CNAME_ROOT, cname.domain)
        run(['ln', '-nsf', self.project_root, doc_root_link])
        # Companion link mapping the CNAME back to the project checkout.
        project_link = os.path.join(self.PROJECT_CNAME_ROOT, cname.domain)
        run(['ln', '-nsf', self.project.doc_path, project_link])
def symlink_translations(self):
    """Symlink project translations

    Link from $WEB_ROOT/<project>/<language>/ ->
    $WEB_ROOT/<translation>/<language>/
    """
    translations = {}
    for trans in self.get_translations():
        translations[trans.language] = trans.slug

    # Make sure the language directory is a directory
    language_dir = os.path.join(self.project_root, self.project.language)
    if os.path.islink(language_dir):
        os.unlink(language_dir)
    if not os.path.lexists(language_dir):
        os.makedirs(language_dir)

    for (language, slug) in translations.items():
        self._log(u"Symlinking translation: {0}->{1}".format(language, slug))
        symlink = os.path.join(self.project_root, language)
        docs_dir = os.path.join(self.WEB_ROOT, slug, language)
        # Argv-list form (as in the other translation-symlink variant):
        # safe against whitespace in paths.
        run(['ln', '-nsf', docs_dir, symlink])

    # Remove old symlinks
    for lang in os.listdir(self.project_root):
        if (lang not in translations and
                lang not in ['projects', self.project.language]):
            to_delete = os.path.join(self.project_root, lang)
            if os.path.islink(to_delete):
                os.unlink(to_delete)
            else:
                shutil.rmtree(to_delete)
def symlink_subprojects(self):
    """Symlink project subprojects

    Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
    """
    subprojects = set()
    rels = self.get_subprojects()
    if rels.count():
        # Don't create the `projects/` directory unless subprojects exist.
        if not os.path.exists(self.subproject_root):
            os.makedirs(self.subproject_root)
        for rel in rels:
            # A mapping of slugs for the subproject URL to the actual built
            # documentation
            from_to = OrderedDict({rel.child.slug: rel.child.slug})
            subprojects.add(rel.child.slug)
            if rel.alias:
                from_to[rel.alias] = rel.child.slug
                subprojects.add(rel.alias)
            for from_slug, to_slug in from_to.items():
                self._log(u"Symlinking subproject: {0} -> {1}".format(
                    from_slug, to_slug))
                symlink = os.path.join(self.subproject_root, from_slug)
                docs_dir = os.path.join(self.WEB_ROOT, to_slug)
                symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
                if not os.path.lexists(symlink_dir):
                    os.makedirs(symlink_dir)
                # Argv-list form: safe against whitespace in paths.
                run(['ln', '-nsf', docs_dir, symlink])

    # Remove old symlinks
    if os.path.exists(self.subproject_root):
        for subproj in os.listdir(self.subproject_root):
            if subproj not in subprojects:
                os.unlink(os.path.join(self.subproject_root, subproj))
def symlink_versions(self):
    """Symlink project's versions

    Link from $WEB_ROOT/<project>/<language>/<version>/ ->
    HOME/user_builds/<project>/rtd-builds/<version>
    """
    # Active public versions plus built-but-inactive (archived) ones.
    version_queryset = self.get_version_queryset()
    version_dir = os.path.join(self.WEB_ROOT, self.project.slug,
                               self.project.language)
    linked = set()
    if version_queryset.count():
        if not os.path.exists(version_dir):
            safe_makedirs(version_dir)
        for version in version_queryset:
            self._log(u"Symlinking Version: %s" % version)
            built_docs = os.path.join(settings.DOCROOT, self.project.slug,
                                      'rtd-builds', version.slug)
            run(['ln', '-nsf', built_docs,
                 os.path.join(version_dir, version.slug)])
            linked.add(version.slug)

    # Drop links for versions that no longer qualify.
    if os.path.exists(version_dir):
        for stale in os.listdir(version_dir):
            if stale not in linked:
                os.unlink(os.path.join(version_dir, stale))
def symlink_translations(self):
    """Symlink project translations

    Link from $WEB_ROOT/<project>/<language>/ ->
    $WEB_ROOT/<translation>/<language>/
    """
    translations = {trans.language: trans.slug
                    for trans in self.get_translations()}

    # The project's own language entry must be a real directory, never
    # a symlink left over from a previous layout.
    language_dir = os.path.join(self.project_root, self.project.language)
    if os.path.islink(language_dir):
        os.unlink(language_dir)
    if not os.path.lexists(language_dir):
        safe_makedirs(language_dir)

    for language, slug in list(translations.items()):
        self._log(u"Symlinking translation: {0}->{1}".format(language, slug))
        run(['ln', '-nsf',
             os.path.join(self.WEB_ROOT, slug, language),
             os.path.join(self.project_root, language)])

    # Anything else under the project root -- except `projects` and the
    # project's own language -- is a stale translation entry.
    preserved = ['projects', self.project.language]
    for entry in os.listdir(self.project_root):
        if entry in translations or entry in preserved:
            continue
        stale = os.path.join(self.project_root, entry)
        if os.path.islink(stale):
            os.unlink(stale)
        else:
            shutil.rmtree(stale)
def build(self, **kwargs):
    """Build LaTeX sources and convert them to PDF.

    Runs sphinx-build with the latex builder (from the project's
    virtualenv when enabled), then pdflatex over every generated .tex
    file.  Returns a ``(latex_results, pdf_results)`` tuple where each
    element is a ``(status, stdout, stderr)`` triple from ``run``.
    """
    project = self.version.project
    # The sphinx commands below use relative paths, so run from conf dir.
    os.chdir(project.conf_dir(self.version.slug))
    # Default pdf_results to a failure triple so it can always be
    # returned, even when the latex step fails and pdflatex never runs.
    pdf_results = (1, '', '')
    if project.use_virtualenv:
        latex_results = run('%s -b latex -d _build/doctrees . _build/latex'
                            % project.venv_bin(version=self.version.slug,
                                               bin='sphinx-build'))
    else:
        latex_results = run('sphinx-build -b latex '
                            '-d _build/doctrees . _build/latex')
    if latex_results[0] == 0:
        os.chdir('_build/latex')
        tex_files = glob('*.tex')
        if tex_files:
            # Run LaTeX -> PDF conversions, one pdflatex call per file.
            pdflatex_cmds = ['pdflatex -interaction=nonstopmode %s' % tex_file
                             for tex_file in tex_files]
            pdf_results = run(*pdflatex_cmds)
        else:
            pdf_results = (0, "No tex files found", "No tex files found")
    if latex_results[0] != 0 or pdf_results[0] != 0:
        # Non-fatal: PDF output is secondary to the HTML build.
        log.warning("PDF Building failed. Moving on.")
    return (latex_results, pdf_results)
def symlink_subprojects(self):
    """Symlink project subprojects

    Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
    """
    subprojects = set()
    rels = self.get_subprojects()
    if rels.count():
        # Don't create the `projects/` directory unless subprojects exist.
        if not os.path.exists(self.subproject_root):
            os.makedirs(self.subproject_root)
        for rel in rels:
            # A mapping of slugs for the subproject URL to the actual built
            # documentation
            from_to = OrderedDict({rel.child.slug: rel.child.slug})
            subprojects.add(rel.child.slug)
            if rel.alias:
                from_to[rel.alias] = rel.child.slug
                subprojects.add(rel.alias)
            for from_slug, to_slug in from_to.items():
                self._log(u"Symlinking subproject: {0} -> {1}".format(
                    from_slug, to_slug))
                symlink = os.path.join(self.subproject_root, from_slug)
                docs_dir = os.path.join(self.WEB_ROOT, to_slug)
                symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
                if not os.path.lexists(symlink_dir):
                    os.makedirs(symlink_dir)
                # Argv-list form: safe against whitespace in paths.
                run(['ln', '-nsf', docs_dir, symlink])

    # Remove old symlinks
    if os.path.exists(self.subproject_root):
        for subproj in os.listdir(self.subproject_root):
            if subproj not in subprojects:
                os.unlink(os.path.join(self.subproject_root, subproj))
def build(self, **kwargs):
    """Build LaTeX sources and convert them to PDF.

    Runs sphinx-build with the latex builder (from the project's
    virtualenv when enabled), then pdflatex over every generated .tex
    file.  Returns a ``(latex_results, pdf_results)`` tuple where each
    element is a ``(status, stdout, stderr)`` triple from ``run``.
    """
    project = self.version.project
    # The sphinx commands below use relative paths, so run from conf dir.
    os.chdir(project.conf_dir(self.version.slug))
    # Default pdf_results to a failure triple so it can always be
    # returned, even when the latex step fails and pdflatex never runs.
    pdf_results = (1, '', '')
    if project.use_virtualenv:
        latex_results = run(
            '%s -b latex -d _build/doctrees . _build/latex'
            % project.venv_bin(version=self.version.slug,
                               bin='sphinx-build'))
    else:
        latex_results = run('sphinx-build -b latex '
                            '-d _build/doctrees . _build/latex')
    if latex_results[0] == 0:
        os.chdir('_build/latex')
        tex_files = glob('*.tex')
        if tex_files:
            # Run LaTeX -> PDF conversions, one pdflatex call per file.
            pdflatex_cmds = [
                'pdflatex -interaction=nonstopmode %s' % tex_file
                for tex_file in tex_files
            ]
            pdf_results = run(*pdflatex_cmds)
        else:
            pdf_results = (0, "No tex files found", "No tex files found")
    if latex_results[0] != 0 or pdf_results[0] != 0:
        # Non-fatal: PDF output is secondary to the HTML build.
        log.warning("PDF Building failed. Moving on.")
    return (latex_results, pdf_results)
def symlink_versions(self):
    """Symlink project's versions

    Link from $WEB_ROOT/<project>/<language>/<version>/ ->
    HOME/user_builds/<project>/rtd-builds/<version>
    """
    versions = set()
    version_dir = os.path.join(self.WEB_ROOT, self.project.slug,
                               self.project.language)
    # Include active public versions, as well as public versions that
    # are built but not active, for archived versions
    version_queryset = (
        self.project.versions.protected(only_active=False).filter(built=True) |
        self.project.versions.protected(only_active=True))
    if version_queryset.count():
        if not os.path.exists(version_dir):
            os.makedirs(version_dir)
        for version in version_queryset:
            self._log(u"Symlinking Version: %s" % version)
            symlink = os.path.join(version_dir, version.slug)
            docs_dir = os.path.join(settings.DOCROOT, self.project.slug,
                                    'rtd-builds', version.slug)
            # Argv-list form: safe against whitespace in paths.
            run(['ln', '-nsf', docs_dir, symlink])
            versions.add(version.slug)

    # Remove old symlinks
    if os.path.exists(version_dir):
        for old_ver in os.listdir(version_dir):
            if old_ver not in versions:
                os.unlink(os.path.join(version_dir, old_ver))
def symlink_cnames(self, domain=None):
    """Symlink project CNAME domains

    Link from HOME/$CNAME_ROOT/<cname> -> HOME/$WEB_ROOT/<project>
    Also give cname -> project link
    Link from HOME/public_cname_project/<cname> -> HOME/<project>/
    """
    if domain:
        domains = [domain]
    else:
        domains = Domain.objects.filter(project=self.project)
    # NOTE: the loop variable must not be named `domain`; that would
    # shadow (and clobber) the parameter selecting the single-domain case.
    for dom in domains:
        self._log(u"Symlinking CNAME: {0} -> {1}".format(dom.domain,
                                                         self.project.slug))
        # CNAME to doc root
        symlink = os.path.join(self.CNAME_ROOT, dom.domain)
        run(['ln', '-nsf', self.project_root, symlink])
        # Project symlink
        project_cname_symlink = os.path.join(self.PROJECT_CNAME_ROOT,
                                             dom.domain)
        run(['ln', '-nsf', self.project.doc_path, project_cname_symlink])
def unzip_files(dest_file, html_path):
    """Unpack a zipped HTML build into ``html_path``.

    The target directory is recreated from scratch so stale files from
    a previous build cannot survive; the archive is then extracted and
    the result pushed to the app servers.
    """
    # Recreate the target empty instead of duplicating os.makedirs in
    # both branches of an if/else, as the original did.
    if os.path.exists(html_path):
        shutil.rmtree(html_path)
    os.makedirs(html_path)
    # Argv-list form: safe against whitespace in paths.
    run(['unzip', '-o', dest_file, '-d', html_path])
    copy_to_app_servers(html_path, html_path)
def move(self, **kwargs):
    """Move the built epub artifact into the target directory."""
    if not os.path.exists(self.target):
        os.makedirs(self.target)
    epub_artifacts = glob(os.path.join(self.old_artifact_path, "*.epub"))
    if epub_artifacts:
        # Take the first match; the build produces a single epub.
        destination = os.path.join(self.target,
                                   "%s.epub" % self.version.project.slug)
        run('mv -f %s %s' % (epub_artifacts[0], destination))
def clear_artifacts(version_pk):
    """
    Remove artifacts from the build server.
    """
    version = make_api_version(api.version(version_pk).get())
    # One rm -rf per artifact type produced by the builders.
    artifact_paths = (
        version.project.full_epub_path(version.slug),
        version.project.full_man_path(version.slug),
        version.project.full_build_path(version.slug),
        version.project.full_latex_path(version.slug),
    )
    for path in artifact_paths:
        run('rm -rf %s' % path)
def move(self, **kwargs):
    """Publish the built epub under MEDIA_ROOT (or on the app servers)."""
    project = self.version.project
    build_output = os.path.join(project.conf_dir(self.version.slug),
                                '_build', 'epub')
    media_dir = os.path.join(settings.MEDIA_ROOT, 'epub',
                             project.slug, self.version.slug)
    matches = glob(os.path.join(build_output, "*.epub"))
    if not matches:
        return
    source = matches[0]
    destination = os.path.join(media_dir, "%s.epub" % project.slug)
    if getattr(settings, "MULTIPLE_APP_SERVERS", None):
        # Multi-server deployments push the file out over the network.
        copy_file_to_app_servers(source, destination)
    else:
        if not os.path.exists(media_dir):
            os.makedirs(media_dir)
        run('mv -f %s %s' % (source, destination))
def symlink_single_version(self):
    """Symlink project single version

    Link from $WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
    """
    self._log("Symlinking single_version")
    default_version = self.project.get_default_version()
    link_path = self.project_root
    # Clear whatever currently occupies the project root: a symlink is
    # unlinked, a real directory tree is removed.
    if os.path.islink(link_path):
        os.unlink(link_path)
    if os.path.exists(link_path):
        shutil.rmtree(link_path)
    # Where the actual docs live
    built_docs = os.path.join(settings.DOCROOT, self.project.slug,
                              'rtd-builds', default_version)
    run('ln -nsf %s/ %s' % (built_docs, link_path))
def symlink_single_version(self):
    """Symlink project single version

    Link from $WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
    """
    version = self.get_default_version()

    # Clean up symlinks
    symlink = self.project_root
    if os.path.islink(symlink):
        os.unlink(symlink)
    if os.path.exists(symlink):
        shutil.rmtree(symlink)

    # Create symlink
    if version is not None:
        docs_dir = os.path.join(settings.DOCROOT, self.project.slug,
                                "rtd-builds", version.slug)
        # Argv-list form: safe against whitespace in paths.  The
        # trailing slash on the link target is preserved from the
        # original shell command.
        run(["ln", "-nsf", docs_dir + "/", symlink])
def build(self, **kwargs):
    """Build man pages with sphinx-build and return the run() result."""
    project = self.version.project
    os.chdir(self.version.project.conf_dir(self.version.slug))
    if project.use_virtualenv:
        sphinx = project.venv_bin(version=self.version.slug,
                                  bin='sphinx-build')
        command = '%s -b man -d _build/doctrees . _build/man' % sphinx
    else:
        # NOTE(review): unlike the virtualenv branch, this command passes
        # no -d doctrees directory -- looks unintentional, confirm.
        command = "sphinx-build -b man . _build/man"
    return run(command)
def build(self, **kwargs):
    """Build epub output with sphinx-build and return the run() result."""
    project = self.version.project
    os.chdir(project.conf_dir(self.version.slug))
    if project.use_virtualenv:
        sphinx = project.venv_bin(version=self.version.slug,
                                  bin='sphinx-build')
        command = '%s -b epub . _build/epub' % sphinx
    else:
        command = "sphinx-build -b epub . _build/epub"
    return run(command)
def symlink_single_version(self):
    """Symlink project single version

    Link from $WEB_ROOT/<project> -> HOME/user_builds/<project>/rtd-builds/latest/
    """
    default = self.get_default_version()

    # Remove whatever occupies the project root today: unlink a symlink,
    # recursively delete a real directory.
    link_path = self.project_root
    if os.path.islink(link_path):
        os.unlink(link_path)
    if os.path.exists(link_path):
        shutil.rmtree(link_path)

    # Only relink when a default version actually exists.
    if default is not None:
        target = os.path.join(settings.DOCROOT, self.project.slug,
                              'rtd-builds', default.slug)
        run(['ln', '-nsf', target, link_path])
def move(self, **kwargs):
    """Move generated PDFs into MEDIA_ROOT (or onto the app servers)."""
    # This needs to be thought about more because of all the state above.
    # We could just shove the filename on the instance or something.
    project = self.version.project
    latex_dir = os.path.join(project.conf_dir(self.version.slug),
                             '_build', 'latex')
    os.chdir(latex_dir)
    for tex_file in glob('*.tex'):
        media_dir = os.path.join(settings.MEDIA_ROOT, 'pdf',
                                 project.slug, self.version.slug)
        destination = os.path.join(media_dir, '%s.pdf' % project.slug)
        # pdflatex names its output predictably: foo.tex -> foo.pdf
        source = os.path.join(os.getcwd(),
                              os.path.splitext(tex_file)[0] + '.pdf')
        if getattr(settings, "MULTIPLE_APP_SERVERS", None):
            copy_file_to_app_servers(source, destination)
        else:
            if not os.path.exists(media_dir):
                os.makedirs(media_dir)
            run('mv -f %s %s' % (source, destination))
def move(self, **kwargs):
    """Copy each built PDF to its public location."""
    # This needs to be thought about more because of all the state above.
    # We could just shove the filename on the instance or something.
    project = self.version.project
    os.chdir(os.path.join(project.conf_dir(self.version.slug),
                          '_build', 'latex'))
    multiple_servers = getattr(settings, "MULTIPLE_APP_SERVERS", None)
    for tex_file in glob('*.tex'):
        to_path = os.path.join(settings.MEDIA_ROOT, 'pdf',
                               project.slug, self.version.slug)
        to_file = os.path.join(to_path, '%s.pdf' % project.slug)
        # pdflatex names its output predictably: foo.tex -> foo.pdf
        pdf_name = os.path.splitext(tex_file)[0] + '.pdf'
        from_file = os.path.join(os.getcwd(), pdf_name)
        if multiple_servers:
            copy_file_to_app_servers(from_file, to_file)
        else:
            if not os.path.exists(to_path):
                os.makedirs(to_path)
            run('mv -f %s %s' % (from_file, to_file))
def build(self, **kwargs):
    """Build dirhtml output and track whether anything changed."""
    project = self.version.project
    os.chdir(self.version.project.conf_dir(self.version.slug))
    if project.use_virtualenv:
        sphinx = project.venv_bin(version=self.version.slug,
                                  bin='sphinx-build')
        command = '%s -b dirhtml . _build/html' % sphinx
    else:
        command = "sphinx-build -b dirhtml . _build/html"
    build_results = run(command)
    # Sphinx prints this when no source file changed since last build.
    if 'no targets are out of date.' in build_results[1]:
        self._changed = False
    return build_results
def move(self, **kwargs):
    """Move built HTML docs (and the zip archive) to their public location.

    Docs are copied to remote app servers or onto the local filesystem;
    projects whose slug contains an underscore also get a dash-slug
    alias target.
    """
    project = self.version.project
    if project.full_build_path(self.version.slug):
        # Copy the html files.
        target = project.rtd_build_path(self.version.slug)
        if "_" in project.slug:
            new_slug = project.slug.replace('_', '-')
            # Only replace the first occurrence, so the user_builds
            # portion of the path can't be rewritten by accident.
            # (The original omitted the count argument, contradicting
            # its own comment.)
            new_target = target.replace(project.slug, new_slug, 1)
            targets = [target, new_target]
        else:
            targets = [target]
        for target in targets:
            if getattr(settings, "MULTIPLE_APP_SERVERS", None):
                log.info("Copying docs to remote server.")
                copy_to_app_servers(
                    project.full_build_path(self.version.slug), target)
            else:
                if os.path.exists(target):
                    shutil.rmtree(target)
                log.info("Copying docs on the local filesystem")
                shutil.copytree(
                    project.full_build_path(self.version.slug), target)

        # Copy the zip file.
        to_path = os.path.join(settings.MEDIA_ROOT, 'htmlzip',
                               project.slug, self.version.slug)
        to_file = os.path.join(to_path, '%s.zip' % project.slug)
        from_path = project.checkout_path(self.version.slug)
        from_file = os.path.join(from_path, '%s.zip' % project.slug)
        if getattr(settings, "MULTIPLE_APP_SERVERS", None):
            copy_file_to_app_servers(from_file, to_file)
        else:
            if not os.path.exists(to_path):
                os.makedirs(to_path)
            run('mv -f %s %s' % (from_file, to_file))
    else:
        log.warning("Not moving docs, because the build dir is unknown.")
def build(self, **kwargs):
    """Build LaTeX sources, the index, and the final PDF.

    Pipeline: sphinx-build (latex builder) -> pdflatex -> makeindex ->
    pdflatex again (so the generated index is picked up).  Returns the
    combined status/stdout/stderr of all steps, or the raw latex
    results if the latex step itself failed.
    """
    self.clean()
    project = self.version.project
    # The sphinx command uses relative paths, so run from the conf dir.
    os.chdir(project.conf_dir(self.version.slug))
    # Default to this so we can return it always.
    results = {}
    latex_results = run('%s -b latex -D language=%s -d _build/doctrees . _build/latex'
                        % (project.venv_bin(version=self.version.slug,
                                            bin='sphinx-build'),
                           project.language))
    if latex_results[0] == 0:
        os.chdir('_build/latex')
        tex_files = glob('*.tex')
        if tex_files:
            # Run LaTeX -> PDF conversions
            pdflatex_cmds = [('pdflatex -interaction=nonstopmode %s'
                              % tex_file) for tex_file in tex_files]
            makeindex_cmds = [('makeindex -s python.ist %s.idx'
                               % os.path.splitext(tex_file)[0])
                              for tex_file in tex_files]
            pdf_results = run(*pdflatex_cmds)
            ind_results = run(*makeindex_cmds)
            # Second pdflatex pass so the makeindex output is included.
            pdf_results = run(*pdflatex_cmds)
        else:
            pdf_results = (0, "No tex files found", "No tex files found")
            ind_results = (0, "No tex files found", "No tex files found")
        # Fold status codes and outputs of all three steps together.
        results = [
            latex_results[0] + ind_results[0] + pdf_results[0],
            latex_results[1] + ind_results[1] + pdf_results[1],
            latex_results[2] + ind_results[2] + pdf_results[2],
        ]
        # Remember the produced PDF's file name for the move step.
        pdf_match = PDF_RE.search(results[1])
        if pdf_match:
            self.pdf_file_name = pdf_match.group(1).strip()
    else:
        results = latex_results
    return results
def build(self, **kwargs):
    """Run the mkdocs build from the project checkout and return the result."""
    checkout_path = self.version.project.checkout_path(self.version.slug)
    os.chdir(checkout_path)
    # Actual build
    mkdocs_bin = self.version.project.venv_bin(version=self.version.slug,
                                               bin='mkdocs')
    command = (
        "{command} {builder} --clean --site-dir={build_dir} --theme=readthedocs"
        .format(command=mkdocs_bin,
                builder=self.builder,
                build_dir=self.build_dir))
    return run(command, shell=True)
def build(self, **kwargs):
    """Run sphinx-build for the configured builder and return the result."""
    self.clean()
    project = self.version.project
    os.chdir(project.conf_dir(self.version.slug))
    # -E forces a full re-read of all source files when requested.
    force_str = " -E " if self._force else ""
    sphinx_bin = project.venv_bin(version=self.version.slug,
                                  bin='sphinx-build')
    build_command = "%s -T %s -b %s -d _build/doctrees -D language=%s . %s " % (
        sphinx_bin,
        force_str,
        self.sphinx_builder,
        project.language,
        self.sphinx_build_dir,
    )
    return run(build_command, shell=True)
def move(self, **kwargs):
    """Pick a PDF from the build artifacts and move it into the target dir."""
    if not os.path.exists(self.target):
        os.makedirs(self.target)
    # Candidate names: exact <slug>.pdf and capitalized <Slug>.pdf.
    exact = os.path.join(self.old_artifact_path,
                         "%s.pdf" % self.version.project.slug)
    exact_upper = os.path.join(
        self.old_artifact_path,
        "%s.pdf" % self.version.project.slug.capitalize())
    if self.pdf_file_name and os.path.exists(self.pdf_file_name):
        from_file = self.pdf_file_name
    # NOTE(review): this second `if` is not chained to the previous one,
    # so an exact slug match -- or the glob fallback below -- overrides
    # self.pdf_file_name.  Confirm that precedence is intended.
    if os.path.exists(exact):
        from_file = exact
    elif os.path.exists(exact_upper):
        from_file = exact_upper
    else:
        from_globs = glob(os.path.join(self.old_artifact_path, "*.pdf"))
        if from_globs:
            # Prefer the most recently modified PDF.
            from_file = max(from_globs, key=os.path.getmtime)
        else:
            from_file = None
    if from_file:
        to_file = os.path.join(self.target,
                               "%s.pdf" % self.version.project.slug)
        run('mv -f %s %s' % (from_file, to_file))
def build(self, **kwargs):
    """Build the docs with mkdocs, working from the project checkout."""
    project = self.version.project
    os.chdir(project.checkout_path(self.version.slug))
    # Actual build
    command = (
        "{command} {builder} --clean --site-dir={build_dir} --theme=readthedocs"
        .format(
            command=project.venv_bin(version=self.version.slug,
                                     bin='mkdocs'),
            builder=self.builder,
            build_dir=self.build_dir,
        ))
    return run(command, shell=True)
def build(self, **kwargs):
    """Build HTML docs with sphinx-build, then zip the output.

    Returns the (status, stdout, stderr) triple from sphinx-build;
    clears ``self._changed`` when sphinx reports nothing out of date.
    """
    # Removed unused local `id_dir = "/tmp/"` from the original.
    project = self.version.project
    os.chdir(project.conf_dir(self.version.slug))
    # -E forces a full re-read of all source files when requested.
    force_str = " -E " if self.force else ""
    if project.use_virtualenv:
        build_command = "%s %s -b html . _build/html " % (
            project.venv_bin(version=self.version.slug,
                             bin='sphinx-build'),
            force_str)
    else:
        build_command = "sphinx-build %s -b html . _build/html" % (force_str)
    build_results = run(build_command, shell=True)
    self._zip_html()
    # Sphinx prints this when no source file changed since last build.
    if 'no targets are out of date.' in build_results[1]:
        self._changed = False
    return build_results
def symlink_subprojects(self):
    """
    Symlink project subprojects.

    Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
    """
    linked_slugs = set()
    rels = self.get_subprojects()
    if rels.count():
        # The `projects/` directory is only created when subprojects exist.
        if not os.path.exists(self.subproject_root):
            safe_makedirs(self.subproject_root)
        for rel in rels:
            # Map each URL slug (child slug plus optional alias) onto the
            # slug of the actually-built documentation.
            from_to = OrderedDict({rel.child.slug: rel.child.slug})
            linked_slugs.add(rel.child.slug)
            if rel.alias:
                from_to[rel.alias] = rel.child.slug
                linked_slugs.add(rel.alias)
            for from_slug, to_slug in list(from_to.items()):
                log.info(constants.LOG_TEMPLATE.format(
                    project=self.project.slug,
                    version='',
                    msg="Symlinking subproject: {0} -> {1}".format(from_slug,
                                                                   to_slug)))
                symlink = os.path.join(self.subproject_root, from_slug)
                docs_dir = os.path.join(self.WEB_ROOT, to_slug)
                symlink_dir = os.sep.join(symlink.split(os.path.sep)[:-1])
                if not os.path.lexists(symlink_dir):
                    safe_makedirs(symlink_dir)
                # TODO this should use os.symlink, not a call to shell. For now,
                # this passes command as a list to be explicit about escaping
                # characters like spaces.
                status, _, stderr = run(['ln', '-nsf', docs_dir, symlink])
                if status > 0:
                    log.error('Could not symlink path: status=%d error=%s',
                              status, stderr)

    # Drop links whose slug is no longer a subproject or alias.
    if os.path.exists(self.subproject_root):
        for entry in os.listdir(self.subproject_root):
            if entry not in linked_slugs:
                os.unlink(os.path.join(self.subproject_root, entry))
def symlink_subprojects(self):
    """
    Symlink project subprojects.

    Link from $WEB_ROOT/projects/<project> -> $WEB_ROOT/<project>
    """
    known = set()
    rels = self.get_subprojects()
    if rels.count():
        # Create the `projects/` directory only when subprojects exist.
        if not os.path.exists(self.subproject_root):
            safe_makedirs(self.subproject_root)
        for rel in rels:
            # URL slug -> built-documentation slug, covering the child
            # slug itself plus its optional alias.
            mapping = OrderedDict({rel.child.slug: rel.child.slug})
            known.add(rel.child.slug)
            if rel.alias:
                mapping[rel.alias] = rel.child.slug
                known.add(rel.alias)
            for from_slug, to_slug in list(mapping.items()):
                self._log(u"Symlinking subproject: {0} -> {1}".format(from_slug,
                                                                      to_slug))
                symlink = os.path.join(self.subproject_root, from_slug)
                docs_dir = os.path.join(self.WEB_ROOT, to_slug)
                parent = os.sep.join(symlink.split(os.path.sep)[:-1])
                if not os.path.lexists(parent):
                    safe_makedirs(parent)
                # TODO this should use os.symlink, not a call to shell. For now,
                # this passes command as a list to be explicit about escaping
                # characters like spaces.
                status, _, stderr = run(['ln', '-nsf', docs_dir, symlink])
                if status > 0:
                    log.error('Could not symlink path: status=%d error=%s',
                              status, stderr)

    # Remove entries that no longer correspond to a subproject or alias.
    if os.path.exists(self.subproject_root):
        for entry in os.listdir(self.subproject_root):
            if entry not in known:
                os.unlink(os.path.join(self.subproject_root, entry))
def update_imported_docs(version_pk): """ Check out or update the given project's repository. """ version_data = api.version(version_pk).get() version = make_api_version(version_data) project = version.project # Make Dirs if not os.path.exists(project.doc_path): os.makedirs(project.doc_path) with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)): update_docs_output = {} if not project.vcs_repo(): raise ProjectImportError("Repo type '{repo_type}' unknown".format( repo_type=project.repo_type)) # Get the actual code on disk if version: log.info('Checking out version {slug}: {identifier}'.format( slug=version.slug, identifier=version.identifier)) version_slug = version.slug version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.checkout( version.identifier) else: # Does this ever get called? log.info('Updating to latest revision') version_slug = 'latest' version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.update() # Ensure we have a conf file (an exception is raised if not) project.conf_file(version.slug) # Do Virtualenv bits: if project.use_virtualenv: if project.use_system_packages: site_packages = '--system-site-packages' else: site_packages = '--no-site-packages' update_docs_output['venv'] = run( '{cmd} --distribute {site_packages} {path}'.format( cmd='virtualenv', site_packages=site_packages, path=project.venv_path(version=version_slug))) # Other code expects sphinx-build to be installed inside the virtualenv. 
# Using the -I option makes sure it gets installed even if it is # already installed system-wide (and --system-site-packages is used) if project.use_system_packages: ignore_option = '-I' else: ignore_option = '' update_docs_output['sphinx'] = run( '{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1' .format(cmd=project.venv_bin(version=version_slug, bin='pip'), ignore_option=ignore_option)) if project.requirements_file: os.chdir(project.checkout_path(version_slug)) update_docs_output['requirements'] = run( '{cmd} install -r {requirements}'.format( cmd=project.venv_bin(version=version_slug, bin='pip'), requirements=project.requirements_file)) os.chdir(project.checkout_path(version_slug)) update_docs_output['install'] = run( '{cmd} setup.py install --force'.format( cmd=project.venv_bin(version=version_slug, bin='python'))) # check tags/version #XXX:dc: what in this block raises the values error? try: old_versions = [ obj['identifier'] for obj in api.version.get(project__slug=project.slug, limit=5000)['objects'] ] if version_repo.supports_tags: transaction.enter_transaction_management(True) tags = version_repo.tags for tag in tags: if tag.identifier in old_versions: continue log.debug('NEW TAG: (%s not in %s)' % (tag.identifier, old_versions)) slug = slugify_uniquely(Version, tag.verbose_name, 'slug', 255, project=project) try: version_data = api.version.post( dict(project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=tag.identifier, verbose_name=tag.verbose_name)) ver = make_api_version(version_data) log.info("New tag found: {0}".format(tag.identifier)) ver, highest = project.highest_version[1] ver_obj = mkversion(ver) #TODO: Handle updating higher versions automatically. #This never worked very well, anyways. 
if highest and ver_obj and ver_obj > highest: log.info("Highest version known, building docs") update_docs.delay(ver.project.pk, version_pk=ver.pk) except Exception, e: log.error("Failed to create version (tag)", exc_info=True) transaction.rollback() transaction.leave_transaction_management() if version_repo.supports_branches: transaction.enter_transaction_management(True) branches = version_repo.branches for branch in branches: if branch.identifier in old_versions: continue log.debug('NEW BRANCH: (%s not in %s)' % (branch, old_versions)) slug = slugify_uniquely(Version, branch.verbose_name, 'slug', 255, project=project) try: api.version.post( dict(project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=branch.identifier, verbose_name=branch.verbose_name)) log.info("New branch found: {0}".format( branch.identifier)) except Exception, e: log.error("Failed to create version (branch)", exc_info=True) transaction.rollback() transaction.leave_transaction_management()
def setup_environment(version):
    """
    Build the virtualenv and install the project into it.

    Always build projects with a virtualenv.

    :param version: the Version being built; its project decides the
        interpreter, site-packages policy and requirements file.
    :returns: dict mapping step name ('venv', 'doc_builder',
        'requirements', 'install') to the result of each ``run()`` call
        (presumably a (status, out, err) tuple — the hand-built
        'install' fallback below mirrors that shape).
    """
    ret_dict = {}
    project = version.project

    # Wipe stale artifacts left inside the virtualenv by a previous
    # `setup.py install` run so this build starts clean.
    build_dir = os.path.join(
        project.venv_path(version=version.slug), 'build')
    if os.path.exists(build_dir):
        log.info(LOG_TEMPLATE.format(
            project=project.slug, version=version.slug,
            msg='Removing existing build dir'))
        shutil.rmtree(build_dir)

    if project.use_system_packages:
        site_packages = '--system-site-packages'
    else:
        site_packages = '--no-site-packages'

    # Invoke virtualenv via the project's configured interpreter so the
    # env is created for the right Python version.
    ret_dict['venv'] = run(
        '{cmd} {site_packages} {path}'.format(
            cmd='{interpreter} -m virtualenv'.format(
                interpreter=project.python_interpreter),
            site_packages=site_packages,
            path=project.venv_path(version=version.slug)
        )
    )

    # Other code expects sphinx-build to be installed inside the
    # virtualenv. Using the -I option makes sure it gets installed
    # even if it is already installed system-wide (and
    # --system-site-packages is used)
    if project.use_system_packages:
        ignore_option = '-I'
    else:
        ignore_option = ''

    # Pinned doc-building toolchain, installed from the local wheel dir.
    requirements = ' '.join([
        'sphinx==1.3.1',
        'Pygments==2.0.2',
        'virtualenv==13.1.0',
        'setuptools==18.0.1',
        'docutils==0.11',
        'mkdocs==0.14.0',
        'mock==1.0.1',
        'pillow==2.6.1',
        'readthedocs-sphinx-ext==0.5.4',
        'sphinx-rtd-theme==0.1.8',
        'alabaster>=0.7,<0.8,!=0.7.5',
        'recommonmark==0.2.0',
    ])
    wheeldir = os.path.join(settings.SITE_ROOT, 'deploy', 'wheels')
    ret_dict['doc_builder'] = run(
        (
            '{cmd} install --use-wheel --find-links={wheeldir} -U '
            '{ignore_option} {requirements}'
        ).format(
            cmd=project.venv_bin(version=version.slug, bin='pip'),
            ignore_option=ignore_option,
            wheeldir=wheeldir,
            requirements=requirements,
        )
    )

    # Handle requirements
    requirements_file_path = project.requirements_file
    checkout_path = project.checkout_path(version.slug)
    if not requirements_file_path:
        # No explicit requirements file configured on the project: look
        # for a conventional one, preferring the docs directory over the
        # repository root.
        builder_class = get_builder_class(project.documentation_type)
        docs_dir = builder_class(version).docs_dir()
        requirements_file_path = _find_requirements_file(
            checkout_path, [docs_dir, ''])
    if requirements_file_path:
        os.chdir(checkout_path)
        ret_dict['requirements'] = run(
            '{cmd} install --exists-action=w -r {requirements}'.format(
                cmd=project.venv_bin(version=version.slug, bin='pip'),
                requirements=requirements_file_path))

    # Handle setup.py
    os.chdir(project.checkout_path(version.slug))
    if os.path.isfile("setup.py"):
        if getattr(settings, 'USE_PIP_INSTALL', False):
            ret_dict['install'] = run(
                '{cmd} install --ignore-installed .'.format(
                    cmd=project.venv_bin(version=version.slug,
                                         bin='pip')))
        else:
            ret_dict['install'] = run(
                '{cmd} setup.py install --force'.format(
                    cmd=project.venv_bin(version=version.slug,
                                         bin='python')))
    else:
        # Status 999 marks "step not run"; shape mirrors run()'s output.
        ret_dict['install'] = (999, "", "No setup.py, skipping install")
    return ret_dict


def _find_requirements_file(checkout_path, search_paths):
    """
    Return the first conventional pip requirements file found under
    ``checkout_path`` in the given sub-paths, or None.

    Earlier entries in ``search_paths`` take priority: the search stops
    at the first existing file. (The previous inline nested loop only
    broke out of the inner loop, so a repo-root requirements.txt could
    overwrite a match already found in the docs directory.)
    """
    for path in search_paths:
        for req_file in ['pip_requirements.txt', 'requirements.txt']:
            test_path = os.path.join(checkout_path, path, req_file)
            # Log (not print) which candidate paths are probed.
            log.info('Testing %s' % test_path)
            if os.path.exists(test_path):
                return test_path
    return None
def update_imported_docs(version_pk): """ Check out or update the given project's repository. """ version_data = api.version(version_pk).get() version = make_api_version(version_data) project = version.project # Make Dirs if not os.path.exists(project.doc_path): os.makedirs(project.doc_path) with project.repo_lock(getattr(settings, 'REPO_LOCK_SECONDS', 30)): update_docs_output = {} if not project.vcs_repo(): raise ProjectImportError("Repo type '{repo_type}' unknown".format( repo_type=project.repo_type)) # Get the actual code on disk if version: log.info('Checking out version {slug}: {identifier}'.format( slug=version.slug, identifier=version.identifier)) version_slug = version.slug version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.checkout(version.identifier) else: # Does this ever get called? log.info('Updating to latest revision') version_slug = 'latest' version_repo = project.vcs_repo(version_slug) update_docs_output['checkout'] = version_repo.update() # Ensure we have a conf file (an exception is raised if not) project.conf_file(version.slug) # Do Virtualenv bits: if project.use_virtualenv: if project.use_system_packages: site_packages = '--system-site-packages' else: site_packages = '--no-site-packages' update_docs_output['venv'] = run('{cmd} --distribute {site_packages} {path}'.format( cmd='virtualenv', site_packages=site_packages, path=project.venv_path(version=version_slug))) # Other code expects sphinx-build to be installed inside the virtualenv. 
# Using the -I option makes sure it gets installed even if it is # already installed system-wide (and --system-site-packages is used) if project.use_system_packages: ignore_option = '-I' else: ignore_option = '' update_docs_output['sphinx'] = run('{cmd} install -U {ignore_option} hg+http://bitbucket.org/birkenfeld/sphinx/@d4c6ac1fcc9c#egg=Sphinx virtualenv==1.8.2 distribute==0.6.28 docutils==0.8.1'.format( cmd=project.venv_bin(version=version_slug, bin='pip'), ignore_option=ignore_option)) if project.requirements_file: os.chdir(project.checkout_path(version_slug)) update_docs_output['requirements'] = run('{cmd} install -r {requirements}'.format( cmd=project.venv_bin(version=version_slug, bin='pip'), requirements=project.requirements_file)) os.chdir(project.checkout_path(version_slug)) update_docs_output['install'] = run('{cmd} setup.py install --force'.format( cmd=project.venv_bin(version=version_slug, bin='python'))) # check tags/version #XXX:dc: what in this block raises the values error? try: old_versions = [obj['identifier'] for obj in api.version.get(project__slug=project.slug, limit=5000)['objects']] if version_repo.supports_tags: transaction.enter_transaction_management(True) tags = version_repo.tags for tag in tags: if tag.identifier in old_versions: continue log.debug('NEW TAG: (%s not in %s)' % (tag.identifier, old_versions)) slug = slugify_uniquely(Version, tag.verbose_name, 'slug', 255, project=project) try: version_data = api.version.post(dict( project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=tag.identifier, verbose_name=tag.verbose_name )) ver = make_api_version(version_data) log.info("New tag found: {0}".format(tag.identifier)) ver, highest = project.highest_version[1] ver_obj = mkversion(ver) #TODO: Handle updating higher versions automatically. #This never worked very well, anyways. 
if highest and ver_obj and ver_obj > highest: log.info("Highest version known, building docs") update_docs.delay(ver.project.pk, version_pk=ver.pk) except Exception, e: log.error("Failed to create version (tag)", exc_info=True) transaction.rollback() transaction.leave_transaction_management() if version_repo.supports_branches: transaction.enter_transaction_management(True) branches = version_repo.branches for branch in branches: if branch.identifier in old_versions: continue log.debug('NEW BRANCH: (%s not in %s)' % (branch, old_versions)) slug = slugify_uniquely(Version, branch.verbose_name, 'slug', 255, project=project) try: api.version.post(dict( project="/api/v1/project/%s/" % project.pk, slug=slug, identifier=branch.identifier, verbose_name=branch.verbose_name )) log.info("New branch found: {0}".format(branch.identifier)) except Exception, e: log.error("Failed to create version (branch)", exc_info=True) transaction.rollback() transaction.leave_transaction_management()