Example no. 1
def run_format(ctx, check=False, quiet=False):
    """Run black code formatter on source code.

    :param [invoke.Context] ctx: Invoke context.
    :param [bool] check: Only check formatting, don't reformat the code.
    :param [bool] quiet: Don't show progress. Only errors.

    :raises UnexpectedExit: On formatter failure.
    """
    print_header("Formatting code", icon="🖤")
    flags = []

    if check:
        flags.append("--check")

    if quiet:
        flags.append("--quiet")

    dirs = f"{PROJECT_INFO.source_directory} {PROJECT_INFO.tests_directory} {PROJECT_INFO.tasks_directory}"
    cmd = f"black {dirs} " + " ".join(flags)

    result: Result = ctx.run(cmd, pty=True, warn=True)

    if result.return_code == 1 and check:
        cprint(
            "Code was not formatted before commit. Try following:\n"
            " * Enable pre-commit hook by running `pre-commit install` in the repository.\n"
            " * Run formatter manually with `pipenv run inv format` before committing code.",
            color="red",
        )
        raise UnexpectedExit(result)

    if result.return_code > 1:
        raise UnexpectedExit(result)
Example no. 2
def _lint_pycodestyle(ctx: Context, path: Optional[List[str]] = None):
    """Run PEP8 checking on code; this includes primary code (source) and secondary code (tests, tasks, etc.).

    PEP8 checking is done via pycodestyle.

    Args:
        ctx: Context
        path: Path override. Run the check only on the given paths.
    """
    # Why pycodestyle and pylint? So far, pylint does not check against every convention in PEP8. As pylint's
    # functionality grows, we should move all PEP8 checking to pylint and remove pycodestyle
    print_header("code style (PEP8)", level=2)
    ensure_reports_dir()

    paths = to_pathlib_path(path, [
        PROJECT_INFO.source_directory, PROJECT_INFO.tests_directory,
        PROJECT_INFO.tasks_directory
    ])

    try:
        ctx.run(
            f"pycodestyle --ignore=E501,W503,E231 --exclude=.svn,CVS,.bzr,.hg,.git,__pycache__,.tox,*_config_parser.py "
            f"{paths_to_str(paths)} > {REPORT_PYCODESTYLE_FPATH}")
        # Ignores explained:
        # - E501: Line length is checked by PyLint
        # - W503: Disable checking of "Line break before binary operator". PEP8 recently (~2019) switched to
        #         "line break before the operator" style, so we should permit this usage.
        # - E231: "missing whitespace after ','" is a false positive. Handled by black formatter.
    finally:
        if os.path.exists(REPORT_PYCODESTYLE_FPATH):
            format_messages(read_contents(REPORT_PYCODESTYLE_FPATH))
Example no. 3
def _lint_docstyle(ctx: Context, path: Optional[List[str]] = None):
    """Run docstring linting on source code.

    Docstring linting is done via pydocstyle. The pydocstyle config can be found in the `.pydocstyle` file.
    This ensures compliance with PEP 257, with a few exceptions. Note that pylint also carries out additional
    docstyle checks.

    Args:
        ctx: Context
        path: Path override. Run the check only on the given paths.
    """
    print_header("documentation style", level=2)
    ensure_reports_dir()

    paths = to_pathlib_path(path, [
        PROJECT_INFO.source_directory, PROJECT_INFO.tests_directory,
        PROJECT_INFO.tasks_directory
    ])

    try:
        ctx.run(
            f"pydocstyle {paths_to_str(paths)} > {REPORT_PYDOCSTYLE_FPATH}")
    finally:
        if os.path.exists(REPORT_PYDOCSTYLE_FPATH):
            format_messages(read_contents(REPORT_PYDOCSTYLE_FPATH))
Example no. 4
def _lint_pylint(ctx: Context, path: Optional[List[str]] = None):
    """Run pylint on code; this includes primary code (source) and secondary code (tests, tasks, etc.).

    The bulk of our code conventions are enforced via pylint. The pylint config can be found in the `.pylintrc` file.

    Args:
        ctx: Context
        path: Path override. Run pylint only on the given paths.
    """
    print_header("pylint", level=2)

    paths = to_pathlib_path(path, [
        PROJECT_INFO.source_directory, PROJECT_INFO.tests_directory,
        PROJECT_INFO.tasks_directory
    ])
    src = PROJECT_INFO.source_directory
    grouped_paths = groupby(
        paths,
        lambda current_path: src in current_path.parents or current_path == src)

    for is_source, group in grouped_paths:
        if is_source:
            run_pylint(ctx, list(group), REPORT_PYLINT_SOURCE_FPATH,
                       PYLINT_CONFIG_SOURCE_FPATH)
        else:
            run_pylint(ctx, list(group), REPORT_PYLINT_TESTS_FPATH,
                       PYLINT_CONFIG_TESTS_FPATH)
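
The groupby call above splits the paths into a "source" group and an "everything else" group so each can be linted with its own pylint config. Since itertools.groupby only merges consecutive items with equal keys, the source paths must sit next to each other in the input list. A minimal, self-contained sketch of that grouping (the path values here are made up and stand in for PROJECT_INFO):

from itertools import groupby
from pathlib import Path

src = Path("src")  # hypothetical source directory
paths = [Path("src/pkg_a"), Path("src/pkg_b"), Path("tests"), Path("tasks")]

for is_source, group in groupby(paths, lambda p: src in p.parents or p == src):
    print(is_source, [str(p) for p in group])
# True ['src/pkg_a', 'src/pkg_b']
# False ['tests', 'tasks']
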
Example no. 5
def coverage_report(ctx):
    """Analyse coverage and generate a report to term and HTML; from combined unit and integration tests."""
    print_header("Generating coverage report", icon="📃")
    ensure_reports_dir()

    coverage_files = []  # we'll make a copy because `combine` will erase them
    for test_type in TestType.__members__.values():
        test_conf = test_type.value
        if not test_conf.coverage_dat.exists():
            cprint(
                f"Could not find coverage dat file for {test_conf.name} tests: {test_conf.coverage_dat}",
                "yellow")
        else:
            print(
                f"{test_conf.name.title()} test coverage: {get_total_coverage(ctx, test_conf.coverage_dat)}"
            )

            temp_copy = test_conf.coverage_dat.with_name(
                test_conf.coverage_dat.name.replace(".dat", "-copy.dat"))
            shutil.copy(test_conf.coverage_dat, temp_copy)
            coverage_files.append(str(temp_copy))

    ctx.run(f"""
            export COVERAGE_FILE="{_COVERAGE_DAT_COMBINED}"
            coverage combine {" ".join(coverage_files)}
            coverage html -d {_COVERAGE_HTML}
        """)
    print(
        f"Total coverage: {get_total_coverage(ctx, _COVERAGE_DAT_COMBINED)}\n")
    print(
        f"Refer to coverage report for full analysis in '{_COVERAGE_HTML}/index.html'\n"
        f"Or open the report in your default browser with:\n"
        f"  pipenv run inv coverage-open")
Example no. 6
def lint(ctx, path=None):
    """Run linting on the entire code base (source code, tasks, tests).

    Args:
        ctx (invoke.Context): Context
        path (Optional[List[str]]): Path override. Run linting only on the given paths.
    """
    print_header("Linting", icon="🔎")

    # These cannot be pre/post tasks because arguments cannot be passed through.
    _lint_pylint(ctx, path)
    _lint_pycodestyle(ctx, path)
    _lint_docstyle(ctx, path)
Example no. 7
def build(ctx):
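    """Build a wheel for each sub-package found in the namespace directory."""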
    print_header("Running build", icon="🔨")

    for package in os.listdir(PROJECT_INFO.namespace_directory):
        print_header(f"Building '{package}' package", level=2)

        print("Cleanup the 'build' directory")
        shutil.rmtree("build", ignore_errors=True)

        ctx.run(
            f"python setup.py bdist_wheel --package {package}",
            env={"PYTHONPATH": PROJECT_INFO.source_directory},
            pty=True,
        )
Example no. 8
def switch_python_version(ctx, version):
    """Switches the local Python virtual environment to a different Python version.

    Use this to test the sub-packages with a different Python version. The CI pipeline always
    checks all supported versions automatically.

    Notes:
        This task calls ``deactivate`` as a precaution for cases when the task is called
        from an active virtual environment.

    Args:
        ctx (invoke.Context): Context
        version (str): Desired Python version. You can pass just MAJOR.MINOR (for example, 3.6).
    """
    print_header(f"Switching to Python {version}", icon="🐍")
    ctx.run(f"deactivate; git clean -fxd .venv && pipenv sync --python {version} -d", pty=True)
Example no. 9
def upload(ctx):
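    """Upload the built distributions in ``dist/`` to PyPI via twine, authenticating with an API token."""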
    print_header("Uploading library to pypi", icon="⬆")

    pypi_api_token = os.getenv("PYPI_API_TOKEN")
    if not pypi_api_token:
        cprint("PYPI_API_TOKEN environment variable to authenticate with PyPI is not set.", color="red")
        raise RuntimeError()

    if not pypi_api_token.startswith("pypi-"):
        pypi_api_token = f"pypi-{pypi_api_token}"

    ctx.run(
        "twine upload --skip-existing --non-interactive dist/*",
        pty=True,
        # See https://pypi.org/help/#apitoken
        env={"TWINE_USERNAME": "******", "TWINE_PASSWORD": pypi_api_token},
    )
Example no. 10
def run_pylint(ctx, source_dirs: List[Path], report_path: Path,
               pylintrc_fpath: Path):
    """Run pylint with a given configuration on a given code tree and output to a given report file."""
    print_header(paths_to_str(source_dirs, ", "), level=3)
    ensure_reports_dir()
    try:
        # pylint won't lint all `.py` files unless they're in a package (`__init__.py` must exist in the same dir)
        # see https://github.com/PyCQA/pylint/issues/352
        # instead of calling pylint directly, here we use `find` to search for all `.py` files, regardless of whether
        # they are in a package
        ctx.run(f"export PYTHONPATH={PROJECT_INFO.source_directory}\n"
                f'find {paths_to_str(source_dirs)} -type f -name "*.py" | '
                f"xargs pylint --rcfile {pylintrc_fpath} > {report_path}")
    finally:
        if os.path.exists(str(report_path)):
            format_messages(read_contents(report_path),
                            "^.*rated at 10.00/10.*$")
Example no. 11
def _run_tests(ctx, test_type: TestType, path: Optional[List[str]] = None):
    """Execute the tests for a given test type."""
    print_header(f"Running {test_type.name} tests", icon="🔎🐛")
    ensure_reports_dir()

    paths = to_pathlib_path(path, [test_type.value.directory])

    ctx.run(
        f"""
        {test_type.value.exports}
        export PYTHONPATH="$PYTHONPATH:{PROJECT_INFO.source_directory}"
        pytest \
          --cov={PROJECT_INFO.source_directory} --cov-report="" --cov-branch \
          --junitxml={test_type.value.test_report_xml} -vv \
          {paths_to_str(paths)}
        """,
        pty=True,
    )
Example no. 12
def _typecheck(ctx, paths: List[Path], force_typing=False):
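    """Run mypy type checking on the given paths; enforce strict typing when ``force_typing`` is True."""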
    print_header(("Forced" if force_typing else "Optional") + " typing",
                 level=2)

    common_flags = [
        "--show-column-numbers",
        "--show-error-codes",
        "--color-output",
        "--warn-unused-config",
        "--warn-unused-ignores",
        "--follow-imports silent",
        f"--junit-xml {_REPORTS_DIR}",
        *(["--strict", "--allow-untyped-decorators"] if force_typing else []),
        # Untyped decorators are allowed because they may be third party decorators
    ]

    ctx.run(
        f"set -o pipefail; mypy {' '.join(common_flags)} {paths_to_str(paths)}",
        pty=True)
Example no. 13
def install_subpackage_dependencies(ctx, name=None, force=False):
    """Replaces top-level Pipfile dependencies with sub-package dependencies.

    Use in CI to install the dependencies of a single sub-package only.
    Use without the ``name`` in local development to get dependencies from all sub-packages.

    Args:
        ctx (invoke.Context): Context
        name (Optional[str]): Name of sub-package for which to collect and install dependencies.
            If not specified, all sub-packages will be used.
        force (bool): Forces reinstall.
    """
    print_header("Sub-packages", icon="📦")
    print_header("Collecting dependencies", level=2, icon="🛒")

    packages = os.listdir(PROJECT_INFO.namespace_directory) if name is None else [name]
    all_requirements = ""
    libraries_to_sub_packages = defaultdict(list)

    for package in packages:
        print(f"  * Collecting '{package}' package")
        with open(PROJECT_INFO.namespace_directory / package / "requirements.txt") as file_pointer:
            requirements = file_pointer.read()
            all_requirements += requirements
            for library in _list_libraries(requirements):
                libraries_to_sub_packages[library].append(package)

    if not force and _requirements_checksum_not_changed(all_requirements):
        return

    print_header("Uninstalling previous dependencies", level=2, icon="🔽")
    ctx.run("pipenv clean", pty=True)

    print_header("Installing new dependencies", level=2, icon="🔼")
    conflicts: List[Match] = []

    for package in packages:
        print_header(package, level=3)
        requirements_file_path = PROJECT_INFO.namespace_directory / package / "requirements.txt"
        result = ctx.run(f"pipenv run pip install -r {requirements_file_path}", echo=True)
        conflicts.extend(match for match in _RE_PIP_CONFLICT.finditer(result.stdout))

    _print_conflicting_packages(libraries_to_sub_packages, conflicts)
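
_list_libraries and _RE_PIP_CONFLICT are referenced above but not shown. A hypothetical sketch of how library names could be extracted from requirements.txt content so that pip conflicts can be traced back to sub-packages (the real helper may differ):

import re

def _list_libraries(requirements: str) -> List[str]:
    """Return bare library names from requirements.txt content (illustrative sketch only)."""
    names = []
    for line in requirements.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue  # skip blank lines and comments
        # "requests[security]>=2.23,<3" -> "requests"
        names.append(re.split(r"[\[<>=~!; ]", line, maxsplit=1)[0])
    return names
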
Example no. 14
def typecheck(ctx, path=None):
    """Run type checking on source code.

    A non-zero return code from this task indicates invalid types were discovered.

    Args:
        ctx (invoke.Context): Invoke context.
        path (Optional[List[str]]): Path override. Run type checking only on the given paths.
    """
    print_header("RUNNING TYPE CHECKER")

    ensure_reports_dir()

    src = PROJECT_INFO.source_directory
    paths = to_pathlib_path(
        path,
        [src, PROJECT_INFO.tests_directory, PROJECT_INFO.tasks_directory])
    grouped_paths = groupby(
        paths,
        lambda current_path: src in current_path.parents or current_path == src)

    for force_typing, group in grouped_paths:
        _typecheck(ctx, list(group), force_typing)
Example no. 15
def build(ctx):
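    """Generate requirements.txt from the Pipfile, build sdist and wheel packages, and check the PyPI description."""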
    print_header("Running build", icon="🔨")

    print("Cleanup the 'build' directory")
    shutil.rmtree("build", ignore_errors=True)

    print_header("Generate requirements.txt file", icon="âš™", level=2)
    parsed_pipfile = Pipfile.load("Pipfile")
    requirements_file = "\n".join([
        "# DON'T CHANGE THIS FILE! It is generated by `inv build`.\n",
        *list(parsed_pipfile.data["default"].keys())
    ])
    print(requirements_file)

    with open("requirements.txt", "w") as fd:
        fd.write(requirements_file)

    print_header("Build packages", icon="🔨", level=2)
    ctx.run("python setup.py sdist bdist_wheel",
            pty=True,
            env={"PYTHONPATH": PROJECT_INFO.source_directory})

    print_header("Check PyPI description", icon="👀", level=2)
    ctx.run("twine check dist/*")