Example #1
def _cleanupRstFmt(filename):
    updated_contents = contents = getFileContents(filename, mode="rb")

    for keyword in extra_rst_keywords:
        updated_contents = updated_contents.replace(b".. %s::" % keyword,
                                                    b".. raw:: %s" % keyword)

    if updated_contents != contents:
        with open(filename, "wb") as out_file:
            out_file.write(updated_contents)

    rstfmt_call = _getPythonBinaryCall("rstfmt")

    check_call(rstfmt_call + [
        filename,
    ],
               #        stdout=devnull,
               )

    cleanupWindowsNewlines(filename)

    contents = getFileContents(filename, mode="rb")

    # Enforce choice between "bash" and "sh" for code directive. Use bash as
    # more people will know it.
    updated_contents = contents.replace(b".. code:: sh\n", b".. code:: bash\n")

    for keyword in extra_rst_keywords:
        updated_contents = updated_contents.replace(b".. raw:: %s" % keyword,
                                                    b".. %s::" % keyword)

    lines = []
    inside = False
    needs_empty = False

    for line in updated_contents.splitlines():
        if line.startswith(b"-"):
            if inside and needs_empty:
                lines.append(b"")

            inside = True
            needs_empty = True
            lines.append(line)
        elif inside and line == b"":
            needs_empty = False
            lines.append(line)
        elif inside and line.startswith(b"  "):
            needs_empty = True
            lines.append(line)
        else:
            inside = False
            lines.append(line)

    updated_contents = b"\n".join(lines) + b"\n"

    if updated_contents != contents:
        with open(filename, "wb") as out_file:
            out_file.write(updated_contents)
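The function above temporarily disguises the non-standard reST directives listed in extra_rst_keywords as ".. raw::" directives so that rstfmt accepts them, then swaps them back after formatting. A minimal sketch of that round trip on a plain byte string, using a hypothetical "postlist" keyword in place of the real list, could look like this:

# Hypothetical stand-in for the real extra_rst_keywords tuple.
extra_rst_keywords = (b"postlist",)

text = b".. postlist::\n   :excerpts:\n"

# Hide the custom directive from rstfmt by disguising it as a raw directive.
for keyword in extra_rst_keywords:
    text = text.replace(b".. %s::" % keyword, b".. raw:: %s" % keyword)

assert text.startswith(b".. raw:: postlist")

# ... rstfmt would reformat the file here ...

# Restore the original directive afterwards.
for keyword in extra_rst_keywords:
    text = text.replace(b".. raw:: %s" % keyword, b".. %s::" % keyword)

assert text.startswith(b".. postlist::")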
Example #2
def autoformat(filename, abort=False):
    filename = os.path.normpath(filename)

    my_print("Consider", filename, end=": ")

    old_code = getFileContents(filename)

    is_python = _isPythonFile(filename)

    is_c = filename.endswith((".c", ".h"))

    # Some parts of Nuitka must not be re-formatted with black or clang-format
    # as they have different intentions.
    if _shouldNotFormatCode(filename):
        is_python = is_c = False

    # Work on a temporary copy
    tmp_filename = filename + ".tmp"
    shutil.copy(filename, tmp_filename)

    try:
        if is_python:
            _cleanupPyLintComments(tmp_filename, abort)
            _cleanupImportSortOrder(tmp_filename)

        if is_python:
            black_call = _getPythonBinaryCall("black")

            subprocess.call(black_call + ["-q", tmp_filename])
        elif is_c:
            _cleanupClangFormat(filename)
        else:
            _cleanupTrailingWhitespace(tmp_filename)

        if getOS() == "Windows":
            _cleanupWindowsNewlines(tmp_filename)

        changed = False
        if old_code != getFileContents(tmp_filename):
            my_print("Updated.")

            renameFile(tmp_filename, filename)

            changed = True
        else:
            my_print("OK.")

        return changed
    finally:
        if os.path.exists(tmp_filename):
            os.unlink(tmp_filename)
Example #3
    def getFileContents(self):
        if self.kind == "data_file":
            return getFileContents(filename=self.source_path, mode="rb")
        elif self.kind == "data_blob":
            return self.data
        else:
            assert False
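All of these examples rely on a getFileContents helper from Nuitka's file utilities rather than calling open() directly. A minimal sketch of such a helper, assuming Python 3 and only the mode and encoding parameters seen on this page (the real Nuitka implementation may differ in detail):

def getFileContents(filename, mode="r", encoding=None):
    """Return the complete file contents, text by default, bytes with mode="rb"."""
    if "b" in mode:
        # Binary reads must not pass an encoding.
        with open(filename, mode) as f:
            return f.read()

    with open(filename, mode, encoding=encoding) as f:
        return f.read()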
Example #4
def _cleanupPyLintComments(filename, abort):
    from baron.parser import (  # pylint: disable=I0021,import-error,no-name-in-module
        ParsingError,  # @UnresolvedImport
    )
    from redbaron import (  # pylint: disable=I0021,import-error,no-name-in-module
        RedBaron,  # @UnresolvedImport
    )

    old_code = getFileContents(filename)

    try:
        red = RedBaron(old_code)
        # red = RedBaron(old_code.rstrip()+'\n')
    except ParsingError:
        if abort:
            raise

        my_print("PARSING ERROR.")
        return 2

    for node in red.find_all("CommentNode"):
        try:
            _updateCommentNode(node)
        except Exception:
            my_print("Problem with", node)
            node.help(deep=True, with_formatting=True)
            raise

    new_code = red.dumps()

    if new_code != old_code:
        with open(filename, "w") as source_code:
            source_code.write(red.dumps())
Example #5
def _cleanupPyLintComments(filename, abort):
    from redbaron import (  # pylint: disable=I0021,import-error,no-name-in-module
        RedBaron, )

    old_code = getFileContents(filename)

    # Baron does assertions too, and all kinds of strange errors, pylint: disable=broad-except

    try:
        red = RedBaron(old_code)
    except Exception:
        if abort:
            raise

        return

    for node in red.find_all("CommentNode"):
        try:
            _updateCommentNode(node)
        except Exception:
            my_print("Problem with", node)
            node.help(deep=True, with_formatting=True)
            raise

    new_code = red.dumps()

    if new_code != old_code:
        with open(filename, "w") as source_code:
            source_code.write(red.dumps())
Example #6
def getCachedImportedModulesNames(module_name, source_code):
    cache_name = makeCacheName(module_name, source_code)

    return [
        ModuleName(line) for line in getFileContents(
            _getCacheFilename(cache_name, "txt")).strip().split("\n")
    ]
Example #7
def setupCacheHashSalt(test_code_path):
    assert os.path.exists(test_code_path)

    if os.path.exists(os.path.join(test_code_path, ".git")):
        git_cmd = ["git", "ls-tree", "-r", "HEAD", test_code_path]

        process = subprocess.Popen(
            args=git_cmd,
            stdin=getNullInput(),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        stdout_git, stderr_git = process.communicate()
        assert process.returncode == 0, stderr_git

        salt_value = hashlib.md5(stdout_git)
    else:
        salt_value = hashlib.md5()

        for filename in getFileList(test_code_path):
            if filename.endswith(".py"):
                salt_value.update(getFileContents(filename, mode="rb"))

    os.environ["NUITKA_HASH_SALT"] = salt_value.hexdigest()
Example #8
    def __init__(self, tests_path):
        SearchModeBase.__init__(self)

        tests_path = os.path.normcase(os.path.abspath(tests_path))
        version = sys.version

        if str is not bytes:
            tests_path = tests_path.encode("utf8")
            version = version.encode("utf8")

        case_hash = hashlib.md5(tests_path)
        case_hash.update(version)

        from .Common import getTestingCacheDir

        cache_filename = os.path.join(getTestingCacheDir(),
                                      case_hash.hexdigest())

        self.cache_filename = cache_filename

        if os.path.exists(cache_filename):
            self.resume_from = getFileContents(cache_filename) or None
        else:
            self.resume_from = None

        self.active = not self.resume_from
Example #10
def _getCacheFilename(dependency_tool, is_main_executable, source_dir,
                      original_dir, binary_filename):
    original_filename = os.path.join(original_dir,
                                     os.path.basename(binary_filename))
    original_filename = os.path.normcase(original_filename)

    if is_main_executable:
        # Normalize the main program name for caching as well, but we need to
        # use the scons information to distinguish different compilers, as we
        # use different libs there.
        hashed_value = getFileContents(
            os.path.join(source_dir, "scons-report.txt"))
    else:
        hashed_value = original_filename

    # Have different values for different Python major versions.
    hashed_value += sys.version + sys.executable

    if str is not bytes:
        hashed_value = hashed_value.encode("utf8")

    cache_dir = os.path.join(getCacheDir(), "library_deps", dependency_tool)

    makePath(cache_dir)

    return os.path.join(cache_dir, hashlib.md5(hashed_value).hexdigest())
Example #11
def checkCachingSuccess(source_dir):
    ccache_logfile = getSconsReportValue(source_dir=source_dir,
                                         key="CCACHE_LOGFILE")

    if ccache_logfile is not None:
        stats = _getCcacheStatistics(ccache_logfile)

        if not stats:
            scons_logger.warning("You are not using ccache.")
        else:
            counts = defaultdict(int)

            for _command, result in stats.items():
                # These are not important to our users; time-based decisions differentiate them.
                if result in ("cache hit (direct)",
                              "cache hit (preprocessed)"):
                    result = "cache hit"

                # Newer ccache has these, but they duplicate:
                if result in (
                        "direct_cache_hit",
                        "direct_cache_miss",
                        "preprocessed_cache_hit",
                        "preprocessed_cache_miss",
                        "primary_storage_miss",
                ):
                    continue
                if result == "primary_storage_hit":
                    result = "cache hit"
                if result == "cache_miss":
                    result = "cache miss"

                # Usage of incbin causes this for the constants blob integration.
                if result in ("unsupported code directive", "disabled"):
                    continue

                counts[result] += 1

            scons_logger.info("Compiled %d C files using ccache." % len(stats))
            for result, count in counts.items():
                scons_logger.info(
                    "Cached C files (using ccache) with result '%s': %d" %
                    (result, count))

    if os.name == "nt":
        clcache_stats_filename = getSconsReportValue(source_dir=source_dir,
                                                     key="CLCACHE_STATS")

        if clcache_stats_filename is not None and os.path.exists(
                clcache_stats_filename):
            stats = eval(  # lazy, pylint: disable=eval-used
                getFileContents(clcache_stats_filename))

            clcache_hit = stats["CacheHits"]
            clcache_miss = stats["CacheMisses"]

            scons_logger.info(
                "Compiled %d C files using clcache with %d cache hits and %d cache misses."
                % (clcache_hit + clcache_miss, clcache_hit, clcache_miss))
Example #12
def _cleanupPyLintComments(filename):
    new_code = old_code = getFileContents(filename, encoding="utf8")

    def replacer(part):
        def renamer(pylint_token):
            # pylint: disable=too-many-branches,too-many-return-statements
            if pylint_token == "E0602":
                return "undefined-variable"
            elif pylint_token in ("E0401", "F0401"):
                return "import-error"
            elif pylint_token == "E1102":
                return "not-callable"
            elif pylint_token == "E1133":
                return " not-an-iterable"
            elif pylint_token == "E1128":
                return "assignment-from-none"
            # Save line length for this until isort is better at long lines.
            elif pylint_token == "useless-suppression":
                return "I0021"
            elif pylint_token == "R0911":
                return "too-many-return-statements"
            elif pylint_token == "R0201":
                return "no-self-use"
            elif pylint_token == "R0902":
                return "too-many-instance-attributes"
            elif pylint_token == "R0912":
                return "too-many-branches"
            elif pylint_token == "R0914":
                return "too-many-locals"
            elif pylint_token == "R0915":
                return "too-many-statements"
            elif pylint_token == "W0123":
                return "eval-used"
            elif pylint_token == "W0603":
                return "global-statement"
            elif pylint_token == "W0613":
                return "unused-argument"
            elif pylint_token == "W0622":
                return "redefined-builtin"
            elif pylint_token == "W0703":
                return "broad-except"
            else:
                return pylint_token

        return part.group(1) + ",".join(
            sorted(
                set(
                    renamer(token)
                    for token in part.group(2).split(",") if token)))

    new_code = re.sub(r"(pylint\: disable=)(.*)",
                      replacer,
                      new_code,
                      flags=re.M)

    if new_code != old_code:
        putTextFileContents(filename, new_code)
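The replacer above rewrites numeric PyLint codes inside "# pylint: disable=..." comments into their symbolic names, then deduplicates and sorts them. A small standalone illustration of the substitution, with the renamer reduced to two of the mappings shown above:

import re

# Reduced mapping, just enough to demonstrate the rewrite.
_names = {"W0123": "eval-used", "R0912": "too-many-branches"}

def _replacer(match):
    tokens = (token for token in match.group(2).split(",") if token)
    renamed = sorted(set(_names.get(token, token) for token in tokens))
    return match.group(1) + ",".join(renamed)

line = "x = eval(s)  # pylint: disable=W0123,R0912,W0123"
print(re.sub(r"(pylint\: disable=)(.*)", _replacer, line))
# x = eval(s)  # pylint: disable=eval-used,too-many-branches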
Example #13
def _cleanupImportSortOrder(filename):
    _cleanupImportRelative(filename)

    isort_call = _getPythonBinaryCall("isort")

    contents = getFileContents(filename)

    start_index = None
    if "\n# isort:start" in contents:
        parts = contents.splitlines()

        start_index = parts.index("# isort:start")
        contents = "\n".join(parts[start_index + 1 :])

        with open(filename, "w") as out_file:
            out_file.write(contents)

    with open(os.devnull, "w") as devnull:
        check_call(
            isort_call
            + [
                "-q",  # quiet, but stdout is still garbage
                "--overwrite-in-place",  # avoid using another temp file, this is already on one.
                "-ot",  # Order imports by type in addition to alphabetically
                "-m3",  # "vert-hanging"
                "-tc",  # Trailing commas
                "-p",  # make sure nuitka is first party package in import sorting.
                "nuitka",
                "-o",
                "SCons",
                filename,
            ],
            stdout=devnull,
        )

    if start_index is not None:
        contents = getFileContents(filename)

        contents = "\n".join(parts[: start_index + 1]) + "\n" + contents

        with open(filename, "w") as out_file:
            out_file.write(contents)
Example #14
def _discardDebianChangelogLastEntry():
    changelog_lines = getFileContents("debian/changelog").splitlines()

    with openTextFile("debian/changelog", "w") as output:
        first = True
        for line in changelog_lines[1:]:
            if line.startswith("nuitka") and first:
                first = False

            if not first:
                output.write(line + "\n")
Example #15
    def mungeFile(filename):
        # Read the file and modify it so that it can be bundled with the
        # other Pmw files.
        filename = "Pmw" + filename + ".py"
        text = getFileContents(os.path.join(srcdir, filename))
        text = re.sub(r"import Pmw\>", "", text)
        text = re.sub("INITOPT = Pmw.INITOPT", "", text)
        text = re.sub(r"\<Pmw\.", "", text)
        text = "\n" + ("#" * 70) + "\n" + "### File: " + filename + "\n" + text
        return text
Example #16
def _cleanupImportSortOrder(filename):
    _cleanupImportRelative(filename)

    isort_call = _getPythonBinaryCall("isort")

    contents = getFileContents(filename)

    start_index = None
    if "\n# isort:start" in contents:
        parts = contents.splitlines()

        start_index = parts.index("# isort:start")
        contents = "\n".join(parts[start_index + 1:])

        with open(filename, "w") as out_file:
            out_file.write(contents)

    with open(os.devnull, "w") as devnull:
        check_call(
            isort_call + [
                "-q",  # quiet, but stdout is still garbage
                "-ot",  # Order imports by type in addition to alphabetically
                "-m3",  # "vert-hanging"
                "-up",  # Prefer braces () over \ for line continuation.
                "-tc",  # Trailing commas
                "-p",  # make sure nuitka is first party package in import sorting.
                "nuitka",
                "-ns",  # Do not ignore those:
                "__init__.py",
                filename,
            ],
            stdout=devnull,
        )

    if start_index is not None:
        contents = getFileContents(filename)

        contents = "\n".join(parts[:start_index + 1]) + "\n" + contents

        with open(filename, "w") as out_file:
            out_file.write(contents)
Example #17
    def makeCoverageRelative(filename):
        """Normalize coverage data."""

        data = getFileContents(filename)

        data = data.replace(
            (os.path.abspath(".") + os.path.sep).replace("\\", "\\\\"), "")

        if os.path.sep != "/":
            data = data.replace(os.path.sep, "/")

        putTextFileContents(filename, contents=data)
Example #18
def runCodespell(filenames, verbose, write):
    if verbose:
        my_print("Consider", " ".join(filenames))

    command = [
        "codespell",
        "-f",
        "-I",
        os.path.join(
            os.path.dirname(__file__),
            "..",
            "..",
            "..",
            "..",
            "misc/codespell-ignore.txt",
        ),
    ]
    if write:
        command.append("-w")
    command += filenames

    if os.name == "nt":
        extra_path = os.path.join(sys.prefix, "Scripts")
    else:
        extra_path = None

    with withEnvironmentPathAdded("PATH", extra_path):
        result = subprocess.call(command)

    if result == 0:
        for filename in filenames:
            if areSamePaths(__file__, filename):
                continue

            contents = getFileContents(filename)
            old_contents = contents

            for word, replacement in replacements:
                contents = contents.replace(word, replacement)
                contents = contents.replace(word.title(), replacement.title())

            if old_contents != contents:
                putTextFileContents(filename, contents)
                cleanupWindowsNewlines(filename)

    if verbose:
        if result != 0:
            my_print("FAILED.")
        else:
            my_print("OK.")

    return result == 0
Example #19
def updateDebianChangelog(old_version, new_version, distribution):
    debian_version = new_version.replace("rc", "~rc") + "+ds-1"

    os.environ["DEBEMAIL"] = "Kay Hayen <*****@*****.**>"

    if "rc" in new_version:
        if "rc" in old_version:
            # Subsequent pre-release, discard the previous pre-release entry.
            _discardDebianChangelogLastEntry()

        message = "New upstream pre-release."

        if checkNuitkaChangelog() != "draft":
            changelog = getFileContents("Changelog.rst")

            title = "Nuitka Release " + new_version[:-3] + " (Draft)"

            found = False
            with openTextFile("Changelog.rst", "w") as changelog_file:
                for line in changelog.splitlines():
                    if not found:
                        if line.startswith("***") and line.endswith("***"):
                            found = True

                            marker = "*" * (len(title) + 2)

                            changelog_file.write(
                                marker + "\n " + title + "\n" + marker + "\n\n"
                            )
                            changelog_file.write("This release is not done yet.\n\n")
                            changelog_file.write(line + "\n")

                            continue

                    changelog_file.write(line + "\n")

            assert found

    else:
        if "rc" in old_version:
            # Initial final release after pre-releases.
            _discardDebianChangelogLastEntry()

            message = "New upstream release."
        else:
            # Hotfix release after previous final or hotfix release.
            message = "New upstream hotfix release."

    _callDebchange("--newversion=%s" % debian_version, message)
    _callDebchange("-r", "--distribution", distribution, "")
Example #20
def checkAtHome(expected="Nuitka Staging"):
    assert os.path.isfile("setup.py")

    if os.path.isdir(".git"):
        git_dir = ".git"
    else:
        line = getFileFirstLine(".git", "r").strip()
        git_dir = line[8:]

    git_description_filename = os.path.join(git_dir, "description")
    description = getFileContents(git_description_filename).strip()

    assert description == expected, (expected, description)
Example #21
def _cleanupImportSortOrder(filename):
    isort_call = _getPythonBinaryCall("isort")

    contents = getFileContents(filename)

    start_index = None
    if "\n# isort:start" in contents:
        parts = contents.splitlines()

        start_index = parts.index("# isort:start")
        contents = "\n".join(parts[start_index + 1 :])

        with open(filename, "w") as out_file:
            out_file.write(contents)

    with open(os.devnull, "w") as devnull:
        subprocess.check_call(
            isort_call
            + [
                "-q",  # quiet, but stdout is still garbage
                "-ot",  # Order imports by type in addition to alphabetically
                "-m3",  # "vert-hanging"
                "-up",  # Prefer braces () over \ for line continuation.
                "-tc",  # Trailing commas
                "-ns",  # Do not ignore those:
                "__init__.py",
                filename,
            ],
            stdout=devnull,
        )

    if start_index is not None:
        contents = getFileContents(filename)

        contents = "\n".join(parts[: start_index + 1]) + "\n" + contents

        with open(filename, "w") as out_file:
            out_file.write(contents)
Example #22
def _cleanupImportRelative(filename):
    package_name = os.path.dirname(filename).replace(os.path.sep, ".")

    # Make imports local if possible.
    if package_name.startswith("nuitka."):

        source_code = getFileContents(filename)
        updated_code = re.sub(r"from %s import" % package_name,
                              "from . import", source_code)
        updated_code = re.sub(r"from %s\." % package_name, "from .",
                              source_code)

        if source_code != updated_code:
            with open(filename, "w") as out_file:
                out_file.write(updated_code)
Example #23
def _cleanupImportRelative(filename):
    package_name = os.path.dirname(filename)

    # Make imports local if possible.
    if package_name.startswith("nuitka" + os.path.sep):
        package_name = package_name.replace(os.path.sep, ".")

        source_code = getFileContents(filename)
        updated_code = re.sub(
            r"from %s import" % package_name, "from . import", source_code
        )
        updated_code = re.sub(r"from %s\." % package_name, "from .", source_code)

        if source_code != updated_code:
            with open(filename, "w") as out_file:
                out_file.write(updated_code)
Example #24
def checkCachingSuccess(source_dir):
    ccache_logfile = getSconsReportValue(source_dir, "CCACHE_LOGFILE")

    if ccache_logfile is not None:
        stats = _getCcacheStatistics(ccache_logfile)

        if not stats:
            scons_logger.warning("You are not using ccache.")
        else:
            counts = defaultdict(int)

            for _command, result in stats.items():
                # These are not important to our users; time-based decisions differentiate them.
                if result in ("cache hit (direct)", "cache hit (preprocessed)"):
                    result = "cache hit"

                counts[result] += 1

            scons_logger.info("Compiled %d C files using ccache." % len(stats))
            for result, count in counts.items():
                scons_logger.info(
                    "Cached C files (using ccache) with result '%s': %d"
                    % (result, count)
                )

    if os.name == "nt":
        clcache_stats_filename = getSconsReportValue(source_dir, "CLCACHE_STATS")

        if clcache_stats_filename is not None and os.path.exists(
            clcache_stats_filename
        ):
            stats = eval(  # lazy, pylint: disable=eval-used
                getFileContents(clcache_stats_filename)
            )

            clcache_hit = stats["CacheHits"]
            clcache_miss = stats["CacheMisses"]

            scons_logger.info(
                "Compiled %d C files using clcache with %d cache hits and %d cache misses."
                % (clcache_hit + clcache_miss, clcache_hit, clcache_miss)
            )
Example #25
def _cleanupImportRelative(filename):
    """Make imports of Nuitka package when possible."""

    # Avoid doing it for "__main__" packages, because for those the Visual Code
    # IDE doesn't like it and it may not run
    if os.path.basename(filename) == "__main__.py.tmp":
        return

    package_name = os.path.dirname(filename).replace(os.path.sep, ".")

    # Make imports local if possible.
    if not package_name.startswith("nuitka."):
        return

    source_code = getFileContents(filename)
    updated_code = re.sub(r"from %s import" % package_name, "from . import",
                          source_code)
    updated_code = re.sub(r"from %s\." % package_name, "from .", source_code)

    if source_code != updated_code:
        putTextFileContents(filename, contents=updated_code)
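Applied to a module inside the package, the two substitutions above turn absolute imports of the module's own package into relative imports. A self-contained illustration, assuming the file being cleaned lives in the "nuitka.utils" package:

import re

package_name = "nuitka.utils"  # assumed package of the file being cleaned

source_code = (
    "from nuitka.utils import FileOperations\n"
    "from nuitka.utils.FileOperations import getFileContents\n"
)

updated_code = re.sub(r"from %s import" % package_name, "from . import", source_code)
updated_code = re.sub(r"from %s\." % package_name, "from .", updated_code)

print(updated_code)
# from . import FileOperations
# from .FileOperations import getFileContents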
Example #26
    def makeManpage(python, suffix):
        cmd = [
            "help2man",
            "-n",
            "the Python compiler",
            "--no-discard-stderr",
            "--no-info",
            "--include",
            "doc/nuitka-man-include.txt",
            "%s ./bin/nuitka" % python,
        ]

        with openTextFile("doc/nuitka%s.1" % suffix, "wb") as output:
            check_call(cmd, stdout=output)
        cmd[-1] += "-run"
        with openTextFile("doc/nuitka%s-run.1" % suffix, "wb") as output:
            check_call(cmd, stdout=output)

        for manpage in ("doc/nuitka%s.1" % suffix,
                        "doc/nuitka%s-run.1" % suffix):
            manpage_contents = getFileContents(manpage).splitlines()

            new_contents = []
            mark = False

            for count, line in enumerate(manpage_contents):
                if mark:
                    line = ".SS " + line + ".BR\n"
                    mark = False
                elif line == ".IP\n" and manpage_contents[count +
                                                          1].endswith(":\n"):
                    mark = True
                    continue

                if line == r"\fB\-\-g\fR++\-only" + "\n":
                    line = r"\fB\-\-g\++\-only\fR" + "\n"

                new_contents.append(line)

            putTextFileContents(manpage, contents=new_contents)
Example #27
def checkRstLint(document):
    contents = getFileContents(document, mode="rb")

    for keyword in extra_rst_keywords:
        contents = contents.replace(b".. %s::" % keyword,
                                    b".. raw:: %s" % keyword)

    import restructuredtext_lint  # pylint: disable=I0021,import-error

    my_print("Checking %r for proper restructured text ..." % document,
             style="blue")
    lint_results = restructuredtext_lint.lint(
        contents.decode("utf8"),
        document,
    )

    lint_error = False
    for lint_result in lint_results:
        # Not an issue.
        if lint_result.message.startswith("Duplicate implicit target name:"):
            continue

        # We switched to raw, but attributes will still be unknown.
        if lint_result.message.startswith(
                'Error in "raw" directive:\nunknown option: "hidden"'):
            continue
        if lint_result.message.startswith(
                'Error in "raw" directive:\nunknown option: "excerpts"'):
            continue
        if lint_result.message.startswith(
                'Error in "raw" directive:\nunknown option: "members"'):
            continue

        my_print(lint_result, style="yellow")
        lint_error = True

    if lint_error:
        sys.exit("Error, no lint clean rest.")

    my_print("OK.", style="blue")
Example #28
def main():
    goHome()

    if os.name == "nt":
        git_path = getExecutablePath("git")

        if git_path is None:
            git_path = r"C:\Program Files\Git\bin\sh.exe"

            if not os.path.exists(git_path):
                git_path = None

        if git_path is None:
            sys.exit(
                """\
Error, cannot locate 'git.exe', which we need to install git hooks. Adding it to
PATH while executing this will be sufficient."""
            )

        sh_path = os.path.join(os.path.dirname(git_path), "sh.exe")

        if not os.path.exists(sh_path):
            sh_path = os.path.join(
                os.path.dirname(git_path), "..", "..", "bin", "sh.exe"
            )

        sh_path = os.path.normpath(sh_path)

        if not os.path.exists(sh_path):
            sys.exit(
                """\
Error, cannot locate 'sh.exe' near 'git.exe' which we need to install git hooks,
please improve this script."""
            )

        # For MinGW and #! we will need a path without spaces, so use this
        # code to find the short name, which won't have them.
        sh_path = getWindowsShortPathName(sh_path)

    for hook in os.listdir(".githooks"):
        full_path = os.path.join(".githooks", hook)

        hook_contents = getFileContents(full_path)

        if hook_contents.startswith("#!/bin/sh"):
            if os.name == "nt":
                # Correct shebang for Windows git to work.
                hook_contents = "#!%s\n%s" % (
                    sh_path.replace("\\", "/").replace(" ", r"\ "),
                    hook_contents[10:],
                )

                # Also use sys.executable to make sure we find autoformat.
                hook_contents = hook_contents.replace(
                    "./bin/autoformat-nuitka-source",
                    "'%s' ./bin/autoformat-nuitka-source" % sys.executable,
                )
        else:
            sys.exit("Error, unknown hook contents.")

        hook_target = os.path.join(".git/hooks/", hook)
        with open(hook_target, "wb") as out_file:
            out_file.write(hook_contents.encode("utf8"))

        st = os.stat(hook_target)
        os.chmod(hook_target, st.st_mode | stat.S_IEXEC)
Example #29
def main():
    goHome()

    if os.name == "nt":
        git_path = getExecutablePath("git")

        if git_path is None:
            git_path = r"C:\Program Files\Git\bin\sh.exe"

            if not os.path.exists(git_path):
                git_path = None

        if git_path is None:
            sys.exit("""\
Error, cannot locate 'git.exe', which we need to install git hooks. Adding it to
PATH while executing this will be sufficient.""")

        sh_path = os.path.join(os.path.dirname(git_path), "sh.exe")

        if not os.path.exists(sh_path):
            sh_path = os.path.join(os.path.dirname(git_path), "..", "..",
                                   "bin", "sh.exe")

        sh_path = os.path.normpath(sh_path)

        if not os.path.exists(sh_path):
            sys.exit("""\
Error, cannot locate 'sh.exe' near 'git.exe' which we need to install git hooks,
please improve this script.""")

        # For MinGW and #! we will need a path without spaces, so use this
        # code to find the short name, which won't have them.
        sh_path = getWindowsShortPathName(sh_path)

    for hook in os.listdir(".githooks"):
        full_path = os.path.join(".githooks", hook)

        hook_contents = getFileContents(full_path)

        if hook_contents.startswith("#!/bin/sh"):
            if os.name == "nt":
                # Correct shebang for Windows git to work.
                hook_contents = "#!%s\n%s" % (
                    sh_path.replace("\\", "/").replace(" ", r"\ "),
                    hook_contents[10:],
                )

                # Also use sys.executable to make sure we find autoformat.
                hook_contents = hook_contents.replace(
                    "./bin/autoformat-nuitka-source",
                    "'%s' ./bin/autoformat-nuitka-source" % sys.executable,
                )
        else:
            sys.exit("Error, unknown hook contents.")

        hook_target = os.path.join(".git/hooks/", hook)
        with open(hook_target, "wb") as out_file:
            out_file.write(hook_contents.encode("utf8"))

        st = os.stat(hook_target)
        os.chmod(hook_target, st.st_mode | stat.S_IEXEC)
Example #30
def autoformat(filename, git_stage, abort):
    # This makes a lot of distinctions, pylint:disable=too-many-branches

    filename = os.path.normpath(filename)

    my_print("Consider", filename, end=": ")

    is_python = _isPythonFile(filename)

    is_c = filename.endswith((".c", ".h"))

    # Some parts of Nuitka must not be re-formatted with black or clang-format
    # as they have different intentions.
    if _shouldNotFormatCode(filename):
        is_python = is_c = False

    # Work on a temporary copy
    tmp_filename = filename + ".tmp"

    if git_stage:
        old_code = getFileHashContent(git_stage["dst_hash"])
    else:
        old_code = getFileContents(filename)

    with open(tmp_filename, "wb") as output_file:
        output_file.write(old_code)

    try:
        _cleanupWindowsNewlines(tmp_filename)

        if is_python:
            _cleanupPyLintComments(tmp_filename, abort)
            _cleanupImportSortOrder(tmp_filename)

        if is_python:
            black_call = _getPythonBinaryCall("black")

            subprocess.call(black_call + ["-q", tmp_filename])
        elif is_c:
            _cleanupClangFormat(filename)
        else:
            _cleanupTrailingWhitespace(tmp_filename)

        _cleanupWindowsNewlines(tmp_filename)

        changed = False
        if old_code != getFileContents(tmp_filename):
            my_print("Updated.")

            if git_stage:
                new_hash_value = putFileHashContent(tmp_filename)
                updateFileIndex(git_stage, new_hash_value)
                updateWorkingFile(filename, git_stage["dst_hash"], new_hash_value)
            else:
                renameFile(tmp_filename, filename)

            changed = True
        else:
            my_print("OK.")

        return changed
    finally:
        if os.path.exists(tmp_filename):
            os.unlink(tmp_filename)
Example #31
def executePostProcessing():
    # There is a bunch of stuff to consider, pylint: disable=too-many-branches

    result_filename = OutputDirectories.getResultFullpath()

    if not os.path.exists(result_filename):
        postprocessing_logger.sysexit(
            "Error, scons failed to create the expected file %r. " %
            result_filename)

    if isWin32Windows():
        if not Options.shallMakeModule():
            needs_manifest = False
            manifest = None

            if python_version < 0x300:
                # Copy the Windows manifest from the CPython binary to the created
                # executable, so it finds "MSCRT.DLL". This is needed for Python2
                # only; for Python3, newer MSVC doesn't hide the C runtime.
                manifest = getWindowsExecutableManifest(sys.executable)

                if manifest is not None:
                    needs_manifest = True

            if (Options.shallAskForWindowsAdminRights()
                    or Options.shallAskForWindowsUIAccessRights()):
                needs_manifest = True

                if manifest is None:
                    manifest = getDefaultWindowsExecutableManifest()

                if Options.shallAskForWindowsAdminRights():
                    manifest.addUacAdmin()

                if Options.shallAskForWindowsUIAccessRights():
                    manifest.addUacUiAccess()

            if needs_manifest:
                manifest.addResourceToFile(result_filename,
                                           logger=postprocessing_logger)

        if (Options.getWindowsVersionInfoStrings()
                or Options.getWindowsProductVersion()
                or Options.getWindowsFileVersion()):
            version_resources.update(
                addVersionInfoResource(
                    string_values=Options.getWindowsVersionInfoStrings(),
                    product_version=Options.getWindowsProductVersion(),
                    file_version=Options.getWindowsFileVersion(),
                    file_date=(0, 0),
                    is_exe=not Options.shallMakeModule(),
                    result_filename=result_filename,
                    logger=postprocessing_logger,
                ))

        source_dir = OutputDirectories.getSourceDirectoryPath()

        # Attach the binary blob as a Windows resource.
        addResourceToFile(
            target_filename=result_filename,
            data=getFileContents(getConstantBlobFilename(source_dir), "rb"),
            resource_kind=RT_RCDATA,
            res_name=3,
            lang_id=0,
            logger=postprocessing_logger,
        )

        # Attach icons from template file if given.
        template_exe = Options.getWindowsIconExecutablePath()
        if template_exe is not None:
            res_copied = copyResourcesFromFileToFile(
                template_exe,
                target_filename=result_filename,
                resource_kinds=(RT_ICON, RT_GROUP_ICON),
            )

            if res_copied == 0:
                postprocessing_logger.warning(
                    "The specified icon template executable %r didn't contain anything to copy."
                    % template_exe)
            else:
                postprocessing_logger.warning(
                    "Copied %d icon resources from %r." %
                    (res_copied, template_exe))
        else:
            addWindowsIconFromIcons()

    # On macOS, we update the executable path for searching the "libpython"
    # library.
    if (getOS() == "Darwin" and not Options.shallMakeModule()
            and not Options.shallUseStaticLibPython()):
        python_abi_version = python_version_str + getPythonABI()
        python_dll_filename = "libpython" + python_abi_version + ".dylib"
        python_lib_path = os.path.join(sys.prefix, "lib")

        # Note: For CPython and potentially others, the rpath for the Python
        # library needs to be set.

        callInstallNameTool(
            filename=result_filename,
            mapping=(
                (
                    python_dll_filename,
                    os.path.join(python_lib_path, python_dll_filename),
                ),
                (
                    "@rpath/Python3.framework/Versions/%s/Python3" %
                    python_version_str,
                    os.path.join(python_lib_path, python_dll_filename),
                ),
            ),
            rpath=python_lib_path,
        )

    # Modules should not be executable, but Scons creates them that way, so
    # fix it up here.
    if not isWin32Windows() and Options.shallMakeModule():
        removeFileExecutablePermission(result_filename)

    if isWin32Windows() and Options.shallMakeModule():
        candidate = os.path.join(
            os.path.dirname(result_filename),
            "lib" + os.path.basename(result_filename)[:-4] + ".a",
        )

        if os.path.exists(candidate):
            os.unlink(candidate)

    if isWin32Windows() and Options.shallTreatUninstalledPython():
        shutil.copy(getTargetPythonDLLPath(),
                    os.path.dirname(result_filename) or ".")
Example #32
def packDistFolderToOnefileLinux(onefile_output_filename, dist_dir, binary_filename):
    """Pack to onefile binary on Linux.

    Notes: This is mostly a wrapper around AppImage, which does all the heavy
    lifting.
    """

    if not locateDLL("fuse"):
        postprocessing_logger.sysexit(
            """\
Error, the fuse library (libfuse.so.x from fuse2, *not* fuse3) must be installed
for onefile creation to work on Linux."""
        )

    # It might be possible to avoid this by using --runtime-file.
    apprun_filename = os.path.join(dist_dir, "AppRun")
    with open(apprun_filename, "w") as output_file:
        output_file.write(
            """\
#!/bin/bash
exec -a $ARGV0 $APPDIR/%s $@"""
            % os.path.basename(binary_filename)
        )

    addFileExecutablePermission(apprun_filename)

    binary_basename = os.path.basename(getResultBasepath())

    icon_paths = getIconPaths()

    assert icon_paths
    extension = os.path.splitext(icon_paths[0])[1].lower()

    shutil.copyfile(icon_paths[0], getResultBasepath() + extension)

    with open(getResultBasepath() + ".desktop", "w") as output_file:
        output_file.write(
            """\
[Desktop Entry]
Name=%(binary_basename)s
Exec=%(binary_filename)s
Icon=%(binary_basename)s
Type=Application
Categories=Utility;"""
            % {
                "binary_basename": binary_basename,
                "binary_filename": os.path.basename(binary_filename),
            }
        )

    postprocessing_logger.info(
        "Creating single file from dist folder, this may take a while."
    )

    stdout_filename = binary_filename + ".appimage.stdout.txt"
    stderr_filename = binary_filename + ".appimage.stderr.txt"

    stdout_file = open(stdout_filename, "wb")
    stderr_file = open(stderr_filename, "wb")

    # Starting the process while locked, so file handles are not duplicated.
    appimagetool_process = subprocess.Popen(
        (
            _getAppImageToolPath(
                for_operation=True, assume_yes_for_downloads=assumeYesForDownloads()
            ),
            dist_dir,
            "--comp",
            "xz",
            "-n",
            onefile_output_filename,
        ),
        shell=False,
        stdin=getNullInput(),
        stdout=stdout_file,
        stderr=stderr_file,
    )

    result = appimagetool_process.wait()

    stdout_file.close()
    stderr_file.close()

    if not os.path.exists(onefile_output_filename):
        postprocessing_logger.sysexit(
            "Error, expected output file %r not created by AppImage, check its outputs %r and %r."
            % (onefile_output_filename, stdout_filename, stderr_filename)
        )

    if result != 0:
        # Useless now.
        os.unlink(onefile_output_filename)

        if b"Text file busy" in getFileContents(stderr_filename, mode="rb"):
            postprocessing_logger.sysexit(
                "Error, error exit from AppImage because target file is locked."
            )

        postprocessing_logger.sysexit(
            "Error, error exit from AppImage, check its outputs %r and %r."
            % (stdout_filename, stderr_filename)
        )

    os.unlink(stdout_filename)
    os.unlink(stderr_filename)

    postprocessing_logger.info("Completed onefile creation.")
Example #33
    def __init__(self):
        """ Read the JSON file and enable any standard plugins.
        """
        if not getNuitkaVersion() >= "0.6.6":
            sys.exit("Need Nuitka v0.6.6+ for hinted compilation.")
        # start a timer
        self.timer = StopWatch()
        self.timer.start()

        self.implicit_imports = OrderedSet()  # speed up repeated lookups
        self.ignored_modules = OrderedSet()  # speed up repeated lookups
        options = Options.options
        fin_name = self.getPluginOptions()[0]  # the JSON  file name
        import_info = json.loads(
            getFileContents(fin_name))  # read it and extract the two lists
        self.import_calls = import_info["calls"]
        self.import_files = import_info["files"]
        self.msg_count = dict()  # used to limit repeated messages
        self.msg_limit = 21

        # suppress pytest / _pytest / unittest?
        self.accept_test = self.getPluginOptionBool("test", False)
        """
        Check if we should enable any (optional) standard plugins. This code
        must be modified whenever more standard plugins become available.
        """
        show_msg = False  # only show info if one or more detected
        # indicators for found packages
        tk = np = qt = sc = mp = pmw = torch = sklearn = False
        eventlet = tflow = gevent = mpl = trio = False
        msg = "'%s' is adding the following options:" % self.plugin_name

        # detect required standard plugins and request enabling them
        for m in self.import_calls:  # scan thru called items
            if m in ("numpy", "numpy.*"):
                np = True
                show_msg = True
            if m in ("matplotlib", "matplotlib.*"):
                mpl = True
                show_msg = True
            elif m in ("tkinter", "Tkinter", "tkinter.*", "Tkinter.*"):
                tk = True
                show_msg = True
            elif m.startswith(("PyQt", "PySide")):
                qt = True
                show_msg = True
            elif m in ("scipy", "scipy.*"):
                sc = True
                show_msg = True
            elif m in ("multiprocessing",
                       "multiprocessing.*") and getOS() == "Windows":
                mp = True
                show_msg = True
            elif m in ("Pmw", "Pmw.*"):
                pmw = True
                show_msg = True
            elif m == "torch":
                torch = True
                show_msg = True
            elif m in ("sklearn", "sklearn.*"):
                sklearn = True
                show_msg = True
            elif m in ("tensorflow", "tensorflow.*"):
                tflow = True
                show_msg = True
            elif m in ("gevent", "gevent.*"):
                gevent = True
                show_msg = True
            elif m in ("eventlet", "eventlet.*"):
                eventlet = True
                show_msg = True
            # elif m in ("trio", "trio.*"):
            #    trio = True
            #    show_msg = True

        if show_msg is True:
            info(msg)

        if np:
            o = ["numpy="]
            if mpl:
                o.append("matplotlib")
            if sc:
                o.append("scipy")
            if sklearn:
                o.append("sklearn")
            o = ",".join(o).replace("=,", "=")
            if o.endswith("="):
                o = o[:-1]
            options.plugins_enabled.append(o)  # enable numpy
            info("--enable-plugin=" + o)

        if tk:
            options.plugins_enabled.append("tk-inter")  # enable tk-inter
            info("--enable-plugin=tk-inter")

        if qt:
            # TODO more scrutiny for the qt options!
            options.plugins_enabled.append("qt-plugins=sensible")
            info("--enable-plugin=qt-plugins=sensible")

        if mp:
            options.plugins_enabled.append("multiprocessing")
            info("--enable-plugin=multiprocessing")

        if pmw:
            options.plugins_enabled.append("pmw-freezer")
            info("--enable-plugin=pmw-freezer")

        if torch:
            options.plugins_enabled.append("torch")
            info("--enable-plugin=torch")

        if tflow:
            options.plugins_enabled.append("tensorflow")
            info("--enable-plugin=tensorflow")

        if gevent:
            options.plugins_enabled.append("gevent")
            info("--enable-plugin=gevent")

        if eventlet:
            options.plugins_enabled.append("eventlet")
            info("--enable-plugin=eventlet")

        # if trio:
        #    options.plugins_enabled.append("trio")
        #    info("--enable-plugin=trio")

        recurse_count = 0
        for f in self.import_files:  # request recursion to called modules
            if self.accept_test is False and f.startswith(
                ("pytest", "_pytest", "unittest")):
                continue
            options.recurse_modules.append(f)
            recurse_count += 1

        # no plugin detected, but recursing to modules?
        if show_msg is False and recurse_count > 0:
            info(msg)

        msg = "--recurse-to for %i imported modules." % recurse_count

        if len(self.import_files) > 0:
            info(msg)
            info("")

        self.ImplicitImports = None  # the 'implicit-imports' plugin object
        return None
Example #34
def autoformat(filename, git_stage, abort, effective_filename=None):
    """ Format source code with external tools

    Args:
        filename: filename to work on
        git_stage: indicate if this is to be done on staged content
        abort: error exit in case a tool shows a problem
        effective_filename: derive type of file from this name

    Notes:
        The effective filename can be used in case this is already a
        temporary filename intended to replace another.

    Returns:
        Whether the file contents were changed.
    """

    # This makes a lot of distinctions, pylint: disable=too-many-branches,too-many-statements

    if effective_filename is None:
        effective_filename = filename

    if os.path.isdir(effective_filename):
        return

    filename = os.path.normpath(filename)
    effective_filename = os.path.normpath(effective_filename)

    my_print("Consider", filename, end=": ")

    is_python = _isPythonFile(effective_filename)

    is_c = effective_filename.endswith((".c", ".h"))

    is_txt = effective_filename.endswith((
        ".patch",
        ".txt",
        ".rst",
        ".sh",
        ".in",
        ".md",
        ".yml",
        ".stylesheet",
        ".j2",
        ".gitignore",
        ".json",
        ".spec",
        "-rpmlintrc",
    )) or os.path.basename(filename) in (
        "changelog",
        "compat",
        "control",
        "lintian-overrides",
    )

    # Some parts of Nuitka must not be re-formatted with black or clang-format
    # as they have different intentions.
    if not (is_python or is_c or is_txt):
        my_print("Ignored file type.")
        return

    # Work on a temporary copy
    tmp_filename = filename + ".tmp"

    if git_stage:
        old_code = getFileHashContent(git_stage["dst_hash"])
    else:
        old_code = getFileContents(filename, "rb")

    with open(tmp_filename, "wb") as output_file:
        output_file.write(old_code)

    try:
        if is_python:
            cleanupWindowsNewlines(tmp_filename)

            if not _shouldNotFormatCode(effective_filename):
                _cleanupImportSortOrder(tmp_filename)
                _cleanupPyLintComments(tmp_filename, abort)

                black_call = _getPythonBinaryCall("black")

                subprocess.call(black_call + ["-q", "--fast", tmp_filename])
                cleanupWindowsNewlines(tmp_filename)

        elif is_c:
            cleanupWindowsNewlines(tmp_filename)
            _cleanupClangFormat(filename)
            cleanupWindowsNewlines(tmp_filename)
        elif is_txt:
            cleanupWindowsNewlines(tmp_filename)
            _cleanupTrailingWhitespace(tmp_filename)
            cleanupWindowsNewlines(tmp_filename)

        _transferBOM(filename, tmp_filename)

        changed = False
        if old_code != getFileContents(tmp_filename, "rb"):
            my_print("Updated.")

            with withPreserveFileMode(filename):
                if git_stage:
                    new_hash_value = putFileHashContent(tmp_filename)
                    updateFileIndex(git_stage, new_hash_value)
                    updateWorkingFile(filename, git_stage["dst_hash"],
                                      new_hash_value)
                else:
                    renameFile(tmp_filename, filename)

            changed = True
        else:
            my_print("OK.")

        return changed
    finally:
        if os.path.exists(tmp_filename):
            os.unlink(tmp_filename)
Example #35
def main():
    goHome()

    parser = OptionParser()

    parser.add_option(
        "--upload",
        action="store_true",
        dest="upload",
        default=False,
        help="""\
Upload to http://nuitka.net/apidoc requires access rights and is done by the
official servers automatically only. Without this, create the local html folder
only.

Default is %default.""",
    )

    options, _positional_args = parser.parse_args()

    shutil.rmtree("html", ignore_errors=True)

    doxygen_path = getExecutablePath("doxygen")

    # Extra ball on Windows, check default installation PATH too.
    if not doxygen_path and getOS() == "Windows":
        with withEnvironmentPathAdded("PATH", r"C:\Program Files\Doxygen\bin"):
            doxygen_path = getExecutablePath("doxygen")

    if not doxygen_path:
        sys.exit("Error, need to install Doxygen and add it to PATH for this to work.")

    try:
        import doxypypy  # @UnusedImport pylint: disable=I0021,unused-import,unused-variable
    except ImportError:
        sys.exit("Error, needs to install doxypypy into this Python.")

    with withTemporaryFile(suffix=".doxyfile", delete=False) as doxy_file:
        doxy_config = getFileContents("doc/Doxyfile.template")

        with withTemporaryFile(
            suffix=".bat" if getOS() == "Windows" else ".sh", delete=False
        ) as doxy_batch_file:
            if getOS() == "Windows":
                doxy_batch_file.write(
                    "%s -m doxypypy.doxypypy -a -c %%1" % sys.executable
                )
            else:
                doxy_batch_file.write(
                    "#!/bin/sh\nexec '%s' -m doxypypy.doxypypy -a -c $1"
                    % sys.executable
                )

        doxy_batch_filename = doxy_batch_file.name

        doxy_config = doxy_config.replace("%DOXYPYPY%", doxy_batch_filename)
        doxy_file.write(doxy_config)

        doxy_filename = doxy_file.name

    print("Running doxygen:")
    try:
        subprocess.check_call([doxygen_path, doxy_filename])
    finally:
        os.unlink(doxy_filename)
        os.unlink(doxy_batch_filename)

    # Update the repository on the web site.
    if options.upload:
        assert (
            os.system(
                "rsync -avz --delete html/ --chown www-data [email protected]:/var/www/apidoc/"
            )
            == 0
        )

    print("Finished.")
Example #36
def main():
    goHome()

    parser = OptionParser()

    parser.add_option(
        "--upload",
        action="store_true",
        dest="upload",
        default=False,
        help="""\
Upload to http://nuitka.net/apidoc requires access rights and is done by the
official servers automatically only. Without this, create the local html folder
only.

Default is %default.""",
    )

    options, _positional_args = parser.parse_args()

    shutil.rmtree("html", ignore_errors=True)

    doxygen_path = getExecutablePath("doxygen")

    # Extra ball on Windows, check default installation PATH too.
    if not doxygen_path and getOS() == "Windows":
        with withEnvironmentPathAdded("PATH", r"C:\Program Files\Doxygen\bin"):
            doxygen_path = getExecutablePath("doxygen")

    if not doxygen_path:
        sys.exit(
            "Error, need to install Doxygen and add it to PATH for this to work."
        )

    try:
        import doxypypy  # @UnusedImport pylint: disable=I0021,unused-import,unused-variable
    except ImportError:
        sys.exit("Error, needs to install doxypypy into this Python.")

    with withTemporaryFile(suffix=".doxyfile", delete=False) as doxy_file:
        doxy_config = getFileContents("doc/Doxyfile.template")

        with withTemporaryFile(
                suffix=".bat" if getOS() == "Windows" else ".sh",
                delete=False) as doxy_batch_file:
            if getOS() == "Windows":
                doxy_batch_file.write("%s -m doxypypy.doxypypy -a -c %%1" %
                                      sys.executable)
            else:
                doxy_batch_file.write(
                    "#!/bin/sh\nexec '%s' -m doxypypy.doxypypy -a -c $1" %
                    sys.executable)

        doxy_batch_filename = doxy_batch_file.name

        doxy_config = doxy_config.replace("%DOXYPYPY%", doxy_batch_filename)
        doxy_file.write(doxy_config)

        doxy_filename = doxy_file.name

    print("Running doxygen:")
    try:
        subprocess.check_call([doxygen_path, doxy_filename])
    finally:
        os.unlink(doxy_filename)
        os.unlink(doxy_batch_filename)

    # Update the repository on the web site.
    if options.upload:
        assert (os.system(
            "rsync -avz --delete html/ --chown www-data [email protected]:/var/www/apidoc/"
        ) == 0)

    print("Finished.")