Example #1
    def build_docs(
        self,
        path=None,
        fmt="html",
        outdir=None,
        auto_open=True,
        serve=True,
        http=None,
        archive=False,
        upload=False,
        jobs=None,
        write_url=None,
        verbose=None,
    ):
        if self.check_jsdoc():
            return die(JSDOC_NOT_FOUND)

        self.activate_virtualenv()
        self.virtualenv_manager.install_pip_requirements(
            os.path.join(here, "requirements.txt"))

        import webbrowser
        from livereload import Server
        from moztreedocs.package import create_tarball

        unique_id = str(uuid.uuid1())

        outdir = outdir or os.path.join(self.topobjdir, "docs")
        savedir = os.path.join(outdir, fmt)

        path = path or self.topsrcdir
        path = os.path.normpath(os.path.abspath(path))

        docdir = self._find_doc_dir(path)
        if not docdir:
            print(self._dump_sphinx_backtrace())
            return die("failed to generate documentation:\n"
                       "%s: could not find docs at this location" % path)

        result = self._run_sphinx(docdir,
                                  savedir,
                                  fmt=fmt,
                                  jobs=jobs,
                                  verbose=verbose)
        if result != 0:
            print(self._dump_sphinx_backtrace())
            return die("failed to generate documentation:\n"
                       "%s: sphinx return code %d" % (path, result))
        else:
            print("\nGenerated documentation:\n%s" % savedir)

        print("Post processing HTML files")
        self._post_process_html(savedir)

        # Write the artifact containing the link to the S3-hosted docs;
        # code-review uses it to post the link to Phabricator.
        if write_url is not None:
            base_link = (
                "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
            )
            unique_link = base_link + unique_id + "/index.html"
            with open(write_url, "w") as fp:
                fp.write(unique_link)
                fp.flush()

        if archive:
            archive_path = os.path.join(outdir, "%s.tar.gz" % self.project)
            create_tarball(archive_path, savedir)
            print("Archived to %s" % archive_path)

        if upload:
            self._s3_upload(savedir, self.project, unique_id, self.version)

        if not serve:
            index_path = os.path.join(savedir, "index.html")
            if auto_open and os.path.isfile(index_path):
                webbrowser.open(index_path)
            return

        # Create livereload server. Any files modified in the specified docdir
        # will cause a re-build and refresh of the browser (if open).
        try:
            host, port = http.split(":", 1)
            port = int(port)
        except ValueError:
            return die("invalid address: %s" % http)

        server = Server()

        sphinx_trees = self.manager.trees or {savedir: docdir}
        for _, src in sphinx_trees.items():
            run_sphinx = partial(self._run_sphinx,
                                 src,
                                 savedir,
                                 fmt=fmt,
                                 jobs=jobs,
                                 verbose=verbose)
            server.watch(src, run_sphinx)
        server.serve(
            host=host,
            port=port,
            root=savedir,
            open_url_delay=0.1 if auto_open else None,
        )
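
The livereload pattern at the end of Example #1 is generic: register a rebuild callback for each watched source tree, then serve the output directory and let the server refresh the browser when files change. Below is a minimal standalone sketch of the same idea, assuming sphinx-build is on PATH; the docs/ source directory and _build/html output directory are placeholders, not paths from the example above.

    import subprocess
    from functools import partial
    from livereload import Server

    def rebuild(srcdir, outdir):
        # Re-run Sphinx whenever a watched file changes.
        subprocess.call(["sphinx-build", "-b", "html", srcdir, outdir])

    server = Server()
    # Watch the source tree and rebuild into the served directory on change.
    server.watch("docs/", partial(rebuild, "docs/", "_build/html"))
    server.serve(host="localhost", port=5500, root="_build/html",
                 open_url_delay=0.1)
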
Example #2
    def build_docs(
        self,
        path=None,
        fmt="html",
        outdir=None,
        auto_open=True,
        serve=True,
        http=None,
        archive=False,
        upload=False,
        jobs=None,
        write_url=None,
        verbose=None,
    ):

        # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
        sys.path.append(mozpath.join(self.topsrcdir, "tools", "lint",
                                     "eslint"))
        import setup_helper

        setup_helper.set_project_root(self.topsrcdir)

        if not setup_helper.check_node_executables_valid():
            return 1

        setup_helper.eslint_maybe_setup()

        # Set the path so that Sphinx can find jsdoc; unfortunately there isn't
        # a way to pass this to Sphinx itself at the moment.
        os.environ["PATH"] = (
            mozpath.join(self.topsrcdir, "node_modules", ".bin") + os.pathsep +
            self._node_path() + os.pathsep + os.environ["PATH"])

        self.activate_virtualenv()
        self.virtualenv_manager.install_pip_requirements(
            os.path.join(here, "requirements.txt"))

        import webbrowser
        from livereload import Server
        from moztreedocs.package import create_tarball

        unique_id = "%s/%s" % (self.project, str(uuid.uuid1()))

        outdir = outdir or os.path.join(self.topobjdir, "docs")
        savedir = os.path.join(outdir, fmt)

        path = path or self.topsrcdir
        path = os.path.normpath(os.path.abspath(path))

        docdir = self._find_doc_dir(path)
        if not docdir:
            print(self._dump_sphinx_backtrace())
            return die("failed to generate documentation:\n"
                       "%s: could not find docs at this location" % path)

        result = self._run_sphinx(docdir,
                                  savedir,
                                  fmt=fmt,
                                  jobs=jobs,
                                  verbose=verbose)
        if result != 0:
            print(self._dump_sphinx_backtrace())
            return die("failed to generate documentation:\n"
                       "%s: sphinx return code %d" % (path, result))
        else:
            print("\nGenerated documentation:\n%s" % savedir)

        # Write the artifact containing the link to the S3-hosted docs;
        # code-review uses it to post the link to Phabricator.
        if write_url is not None:
            unique_link = BASE_LINK + unique_id + "/index.html"
            with open(write_url, "w") as fp:
                fp.write(unique_link)
                fp.flush()
            print("Generated " + write_url)

        if archive:
            archive_path = os.path.join(outdir, "%s.tar.gz" % self.project)
            create_tarball(archive_path, savedir)
            print("Archived to %s" % archive_path)

        if upload:
            self._s3_upload(savedir, self.project, unique_id, self.version)

        if not serve:
            index_path = os.path.join(savedir, "index.html")
            if auto_open and os.path.isfile(index_path):
                webbrowser.open(index_path)
            return

        # Create livereload server. Any files modified in the specified docdir
        # will cause a re-build and refresh of the browser (if open).
        try:
            host, port = http.split(":", 1)
            port = int(port)
        except ValueError:
            return die("invalid address: %s" % http)

        server = Server()

        sphinx_trees = self.manager.trees or {savedir: docdir}
        for _, src in sphinx_trees.items():
            run_sphinx = partial(self._run_sphinx,
                                 src,
                                 savedir,
                                 fmt=fmt,
                                 jobs=jobs,
                                 verbose=verbose)
            server.watch(src, run_sphinx)
        server.serve(
            host=host,
            port=port,
            root=savedir,
            open_url_delay=0.1 if auto_open else None,
        )
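
Example #2 differs from Example #1 mainly in the Node.js setup: it installs the ESLint/node tooling and prepends node_modules/.bin (plus the node binary's directory) to PATH, because Sphinx offers no direct way to be told where jsdoc lives. A small sketch of that technique using only the standard library; the checkout path below is a placeholder, and shutil.which stands in for the repository's own jsdoc check:

    import os
    import shutil

    # Placeholder for self.topsrcdir in the example above.
    topsrcdir = "/path/to/checkout"

    # Prepend the locally installed node binaries so any subprocess that
    # Sphinx spawns can resolve "jsdoc" without extra configuration.
    os.environ["PATH"] = (
        os.path.join(topsrcdir, "node_modules", ".bin") + os.pathsep +
        os.environ["PATH"])

    # Verify the executable is now reachable on the modified PATH.
    if shutil.which("jsdoc") is None:
        raise SystemExit("jsdoc not found - please install from npm.")
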
Example #3
    def build_docs(self,
                   path=None,
                   fmt='html',
                   outdir=None,
                   auto_open=True,
                   serve=True,
                   http=None,
                   archive=False,
                   upload=False):
        try:
            which.which('jsdoc')
        except which.WhichError:
            return die('jsdoc not found - please install from npm.')

        self.activate_pipenv(os.path.join(here, 'Pipfile'))

        import webbrowser
        from livereload import Server
        from moztreedocs.package import create_tarball

        outdir = outdir or os.path.join(self.topobjdir, 'docs')
        savedir = os.path.join(outdir, fmt)

        path = path or os.path.join(self.topsrcdir, 'tools')
        path = os.path.normpath(os.path.abspath(path))

        docdir = self._find_doc_dir(path)
        if not docdir:
            return die('failed to generate documentation:\n'
                       '%s: could not find docs at this location' % path)

        result = self._run_sphinx(docdir, savedir, fmt=fmt)
        if result != 0:
            return die('failed to generate documentation:\n'
                       '%s: sphinx return code %d' % (path, result))
        else:
            print('\nGenerated documentation:\n%s' % savedir)

        if archive:
            archive_path = os.path.join(outdir, '%s.tar.gz' % self.project)
            create_tarball(archive_path, savedir)
            print('Archived to %s' % archive_path)

        if upload:
            self._s3_upload(savedir, self.project, self.version)

        if not serve:
            index_path = os.path.join(savedir, 'index.html')
            if auto_open and os.path.isfile(index_path):
                webbrowser.open(index_path)
            return

        # Create livereload server. Any files modified in the specified docdir
        # will cause a re-build and refresh of the browser (if open).
        try:
            host, port = http.split(':', 1)
            port = int(port)
        except ValueError:
            return die('invalid address: %s' % http)

        server = Server()

        sphinx_trees = self.manager.trees or {savedir: docdir}
        for dest, src in sphinx_trees.items():
            run_sphinx = partial(self._run_sphinx, src, savedir, fmt=fmt)
            server.watch(src, run_sphinx)
        server.serve(host=host,
                     port=port,
                     root=savedir,
                     open_url_delay=0.1 if auto_open else None)
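
All three examples package the rendered docs with create_tarball from moztreedocs.package when the archive flag is set. A rough standard-library equivalent, assuming the helper simply packs savedir into a gzipped tarball named after the project (that assumption is mine; the real helper may set archive metadata differently):

    import os
    import tarfile

    def create_tarball_sketch(archive_path, savedir):
        # Pack the rendered documentation directory into a .tar.gz archive.
        with tarfile.open(archive_path, "w:gz") as tar:
            tar.add(savedir, arcname=os.path.basename(savedir))

    create_tarball_sketch("docs.tar.gz", os.path.join("docs", "html"))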