def doctest(session: nox.sessions.Session):
    """
    Run the iris doctests and the gallery tests.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    prepare_venv(session)
    # Develop-install iris itself; dependencies come from the prepared env.
    session.install("--no-deps", "--editable", ".")
    session.cd("docs")
    # Build the docs from clean, then run the doctests, both via make.
    for make_args in (("clean", "html"), ("doctest",)):
        session.run("make", *make_args, external=True)
    session.cd("..")
    # Finally exercise the gallery tests.
    session.run("python", "-m", "iris.tests.runner", "--gallery-tests")
def precommit(session: nox.sessions.Session):
    """
    Perform pre-commit hooks of iris codebase.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    import yaml

    # Pip install the session requirements.
    session.install("pre-commit")

    # Load the pre-commit configuration YAML file.
    # safe_load is sufficient for this plain config file and is the
    # recommended loader for data we do not need arbitrary tags from.
    with open(".pre-commit-config.yaml", "r") as fi:
        config = yaml.safe_load(fi)

    # List of pre-commit hook ids that we don't want to run.
    excluded = ["no-commit-to-branch"]

    # Enumerate the ids of pre-commit hooks we do want to run.
    ids = [
        hook["id"]
        for entry in config["repos"]
        for hook in entry["hooks"]
        if hook["id"] not in excluded
    ]

    # Execute the pre-commit hooks.
    # A plain loop (not a comprehension) is used for side effects, and the
    # loop variable no longer shadows the ``id`` builtin.
    for hook_id in ids:
        session.run("pre-commit", "run", "--all-files", hook_id)
def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
    """Runs py.test for a particular project.

    Installs ``requirements.txt`` / ``requirements-test.txt`` (honouring a
    matching constraints file when one is present), optionally installs the
    library from source, calls ``post_install`` if supplied, then invokes
    pytest with the common arguments plus any session posargs.
    """
    if os.path.exists("requirements.txt"):
        # Prefer a constrained install when a constraints file exists.
        if os.path.exists("constraints.txt"):
            session.install("-r", "requirements.txt", "-c", "constraints.txt")
        else:
            session.install("-r", "requirements.txt")

    if os.path.exists("requirements-test.txt"):
        if os.path.exists("constraints-test.txt"):
            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
        else:
            session.install("-r", "requirements-test.txt")

    if INSTALL_LIBRARY_FROM_SOURCE:
        # Editable install of the library under test from the repo root.
        session.install("-e", _get_repo_root())

    if post_install:
        # Hook for callers that need extra setup after installs.
        post_install(session)

    session.run(
        "pytest",
        *(PYTEST_COMMON_ARGS + session.posargs),
        # Pytest will return 5 when no tests are collected. This can happen
        # on travis where slow and flaky tests are excluded.
        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
        success_codes=[0, 5],
        env=get_pytest_env_vars())
def lint(session: nox.sessions.Session) -> None:
    """Run flake8 over the sample, optionally enforcing type hints."""
    # Choose the plugin set up front, then install in one call.
    packages = ["flake8", "flake8-import-order"]
    if TEST_CONFIG['enforce_type_hints']:
        packages.append("flake8-annotations")
    session.install(*packages)

    local_names = _determine_local_import_names(".")
    args = [
        *FLAKE8_COMMON_ARGS,
        "--application-import-names",
        ",".join(local_names),
        ".",
    ]
    session.run("flake8", *args)
def blacken(session: nox.sessions.Session) -> None:
    """Format every Python file under each BLACK_PATHS directory with black."""
    session.install("black")
    # Collect targets with a single nested comprehension (same order as a
    # per-directory loop: directory order, then listdir order within each).
    targets = [
        f"{directory}/{filename}"
        for directory in BLACK_PATHS
        for filename in os.listdir(directory)
        if filename.endswith(".py")
    ]
    session.run("black", *targets)
def _prepare_env(session: nox.sessions.Session) -> None:
    """
    Ensure the session's conda environment matches the lockfile, then
    develop-install an Iris checkout when one is specified.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    lockfile = _session_lockfile(session)
    venv_dir = session.virtualenv.location_name
    if not _venv_populated(session):
        # Environment has been created but packages not yet installed.
        # Populate the environment from the lockfile.
        logger.debug(f"Populating conda env: {venv_dir}")
        session.conda_install(f"--file={lockfile}")
        _cache_venv(session)
    elif _venv_changed(session):
        # Destroy the environment and rebuild it.
        logger.debug(f"Lockfile changed. Recreating conda env: {venv_dir}")
        # Temporarily disable reuse so create() really rebuilds; the original
        # setting is restored afterwards.
        _reuse_original = session.virtualenv.reuse_existing
        session.virtualenv.reuse_existing = False
        session.virtualenv.create()
        session.conda_install(f"--file={lockfile}")
        session.virtualenv.reuse_existing = _reuse_original
        _cache_venv(session)
    logger.debug(f"Environment up to date: {venv_dir}")
    iris_artifact = _get_iris_github_artifact(session)
    if iris_artifact:
        # Install the iris source in develop mode.
        tmp_dir = Path(session.create_tmp())
        iris_dir = tmp_dir / "iris"
        cwd = Path.cwd()
        if not iris_dir.is_dir():
            # First use of this tmp dir: clone iris into it.
            session.run_always("git", "clone", IRIS_GITHUB, str(iris_dir), external=True)
        session.cd(str(iris_dir))
        # Check out the requested commit/branch/tag within the clone.
        session.run_always("git", "fetch", "origin", external=True)
        session.run_always("git", "checkout", iris_artifact, external=True)
        session.cd(str(cwd))
        session.install("--no-deps", "--editable", str(iris_dir))
    # Determine whether verbose diagnostics have been requested
    # from the command line.
    verbose = "-v" in session.posargs or "--verbose" in session.posargs
    if verbose:
        session.run_always("conda", "info")
        session.run_always("conda", "list", f"--prefix={venv_dir}")
        session.run_always(
            "conda",
            "list",
            f"--prefix={venv_dir}",
            "--explicit",
        )
def lint(session: nox.sessions.Session) -> None:
    """Auto-fix imports and formatting, then run the read-only checks."""
    session.install("autoflake", "black", "flake8", "isort", "seed-isort-config")
    # Fixers run first, in order: strip unused imports, seed the isort
    # config, sort imports, format the code.
    fixers = [
        ("autoflake", "--in-place", "--recursive", *source_files),
        ("seed-isort-config", "--application-directories=ddtrace_asgi"),
        ("isort", "--project=ddtrace_asgi", "--recursive", "--apply", *source_files),
        ("black", "--target-version=py36", *source_files),
    ]
    for command in fixers:
        session.run(*command)
    # Then verify everything passes the non-mutating checks.
    check(session)
def format(session: nox.sessions.Session) -> None:
    """
    Sort imports with isort, then reformat code with black.
    """
    session.install(BLACK_VERSION, ISORT_VERSION)
    python_files = [entry for entry in os.listdir(".") if entry.endswith(".py")]
    # --fss sorts imports in strict alphabetical order within sections. See
    # https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
    session.run("isort", "--fss", *python_files)
    session.run("black", *python_files)
def readmegen(session: nox.sessions.Session, path: str) -> None:
    """(Re-)generates the readme for a sample."""
    session.install("jinja2", "pyyaml")

    sample_dir = os.path.dirname(path)
    requirements = os.path.join(sample_dir, "requirements.txt")
    if os.path.exists(requirements):
        # The sample's own requirements may be needed by the template.
        session.install("-r", requirements)

    in_file = os.path.join(sample_dir, "README.rst.in")
    generator = _get_repo_root() + "/scripts/readme-gen/readme_gen.py"
    session.run("python", generator, in_file)
def install_with_constraints(session: nox.sessions.Session, *args: str, **kwargs: Any):
    """Install PyPI packages based on pyproject.toml constraints.

    Exports the project's dev dependencies (via poetry) to a temporary
    requirements file, then uses it as a pip constraint while installing
    ``args``.
    """
    with tempfile.NamedTemporaryFile() as requirements:
        export_command = [
            "poetry",
            "export",
            "--dev",
            "--without-hashes",
            "--format=requirements.txt",
            f"--output={requirements.name}",
        ]
        session.run(*export_command, external=True)
        session.install(f"--constraint={requirements.name}", *args, **kwargs)
def isort(session: nox.sessions.Session):
    """
    Perform isort import checking of iris codebase.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    # Install the only tool this session needs.
    session.install("isort")
    # Check-only mode: report badly ordered imports, change nothing.
    session.run("isort", "--check", ".")
def check(session: nox.sessions.Session) -> None:
    """Run the read-only code-quality checks; no files are modified."""
    session.install(
        "black",
        "flake8",
        "flake8-bugbear",
        "flake8-comprehensions",
        "isort",
        "mypy",
    )
    # Each checker runs over the same source files, in this order.
    checkers = [
        ["black", "--check", "--diff", "--target-version=py36"],
        ["flake8"],
        ["mypy"],
        ["isort", "--check", "--diff", "--project=ddtrace_asgi", "--recursive"],
    ]
    for command in checkers:
        session.run(*command, *source_files)
def black(session: nox.sessions.Session):
    """
    Perform black format checking of iris.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    # Pinned for reproducible formatting results.
    session.install("black==20.8b1")
    # Check both the package and this noxfile itself, in that order.
    for target in (PACKAGE, __file__):
        session.run("black", "--check", target)
def flake8(session: nox.sessions.Session):
    """
    Perform flake8 linting of the code-base.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    # Install flake8 plus the docstring and import-order plugins.
    session.install("flake8", "flake8-docstrings", "flake8-import-order")
    # Lint both the package and this noxfile itself, in that order.
    for target in (PACKAGE, __file__):
        session.run("flake8", target)
def flake8(session: nox.sessions.Session):
    """
    Perform flake8 linting of iris.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    # Plain flake8, no plugins, for this session.
    session.install("flake8")
    # Lint both the package and this noxfile itself, in that order.
    for target in (PACKAGE, __file__):
        session.run("flake8", target)
def benchmarks(session: nox.sessions.Session, ci_mode: bool, gh_pages: bool):
    """
    Perform esmf-regrid performance benchmarks (using Airspeed Velocity).

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.
    ci_mode: bool
        Run a cut-down selection of benchmarks, comparing the current commit to
        the last commit for performance regressions.
    gh_pages: bool
        Run ``asv gh-pages --rewrite`` once finished.

    Notes
    -----
    ASV is set up to use ``nox --session=tests --install-only`` to prepare
    the benchmarking environment.

    """
    session.install("asv", "nox", "pyyaml")
    session.cd("benchmarks")
    # Skip over setup questions for a new machine.
    session.run("asv", "machine", "--yes")

    def asv_exec(*sub_args: str) -> None:
        # Run an asv sub-command, appending --python only when the
        # sub-command's --help shows it is supported.
        run_args = ["asv", *sub_args]
        help_output = session.run(*run_args, "--help", silent=True)
        if "--python" in help_output:
            # Not all asv commands accept the --python kwarg.
            run_args.append(f"--python={session.python}")
        session.run(*run_args)

    if ci_mode:
        # If on a PR: compare to the base (target) branch.
        # Else: compare to previous commit.
        previous_commit = os.environ.get("CIRRUS_BASE_SHA", "HEAD^1")
        try:
            asv_exec("continuous", previous_commit, "HEAD", "--bench=ci")
        finally:
            # Always report the comparison, even if `continuous` errored.
            asv_exec("compare", previous_commit, "HEAD")
    else:
        # f32f23a5 = first supporting commit for nox_asv_plugin.py .
        asv_exec("run", "f32f23a5..HEAD")
    if gh_pages:
        asv_exec("gh-pages", "--rewrite")
def benchmarks(session: nox.sessions.Session, ci_mode: bool):
    """
    Perform esmf-regrid performance benchmarks (using Airspeed Velocity).

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.
    ci_mode: bool
        Run a cut-down selection of benchmarks, comparing the current commit to
        the last commit for performance regressions.

    Notes
    -----
    ASV is set up to use ``nox --session=tests --install-only`` to prepare
    the benchmarking environment. This session environment must use a Python
    version that is also available for ``--session=tests``.

    """
    session.install("asv", "nox")
    session.cd("benchmarks")
    # Skip over setup questions for a new machine.
    session.run("asv", "machine", "--yes")

    def asv_exec(*sub_args: str) -> None:
        # Thin wrapper: run an asv sub-command via the session.
        run_args = ["asv", *sub_args]
        session.run(*run_args)

    if ci_mode:
        # If on a PR: compare to the base (target) branch.
        # Else: compare to previous commit.
        previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1")
        try:
            asv_exec(
                "continuous",
                "--factor=1.2",
                previous_commit,
                "HEAD",
                "--attribute",
                "rounds=4",
            )
        finally:
            # Always report the comparison, even if `continuous` errored.
            asv_exec("compare", previous_commit, "HEAD")
    else:
        # f5ceb808 = first commit supporting nox --install-only .
        asv_exec("run", "f5ceb808..HEAD")
def gallery(session: nox.sessions.Session):
    """
    Perform iris gallery doc-tests.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    prepare_venv(session)
    # Develop-install iris itself; dependencies come from the prepared env.
    session.install("--no-deps", "--editable", ".")
    session.run("python", "-m", "iris.tests.runner", "--gallery-tests")
def tests(session: nox.sessions.Session):
    """
    Perform iris system, integration and unit tests.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    prepare_venv(session)
    # Develop-install iris itself; dependencies come from the prepared env.
    session.install("--no-deps", "--editable", ".")
    runner_flags = ("--default-tests", "--system-tests")
    session.run("python", "-m", "iris.tests.runner", *runner_flags)
def tests(session: nox.sessions.Session):
    """
    Perform esmf-regrid integration and unit tests.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    _prepare_env(session)
    # Install the esmf-regrid source in develop mode.
    session.install("--no-deps", "--editable", ".")

    if not COVERAGE:
        # Execute the tests.
        session.run("pytest")
    else:
        # Execute the tests with code coverage, then upload the report.
        session.run("pytest", "--cov-report=xml", "--cov")
        session.run("codecov")
def linkcheck(session: nox.sessions.Session):
    """
    Perform iris doc link check.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    prepare_venv(session)
    # Develop-install iris itself; dependencies come from the prepared env.
    session.install("--no-deps", "--editable", ".")
    session.cd("docs")
    # Build the docs from clean, then run the sphinx linkcheck builder.
    for make_args in (("clean", "html"), ("linkcheck",)):
        session.run("make", *make_args, external=True)
def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
    """Runs py.test for a particular project.

    Skips directories with no tests, installs requirements (honouring
    constraints files when present), optionally installs the library from
    source and runs ``post_install``, then invokes pytest — enabling
    parallel execution when a supported plugin is listed in requirements.
    """
    # check for presence of tests
    test_list = glob.glob("*_test.py") + glob.glob("test_*.py")
    test_list.extend(glob.glob("tests"))

    if len(test_list) == 0:
        print("No tests found, skipping directory.")
        return

    if TEST_CONFIG["pip_version_override"]:
        pip_version = TEST_CONFIG["pip_version_override"]
        session.install(f"pip=={pip_version}")

    concurrent_args = []
    # BUG FIX: initialise ``packages`` so the later ``+=`` cannot raise
    # NameError when requirements.txt is absent but requirements-test.txt
    # exists.
    packages = ""

    if os.path.exists("requirements.txt"):
        if os.path.exists("constraints.txt"):
            session.install("-r", "requirements.txt", "-c", "constraints.txt")
        else:
            session.install("-r", "requirements.txt")
        with open("requirements.txt") as rfile:
            packages = rfile.read()

    if os.path.exists("requirements-test.txt"):
        if os.path.exists("constraints-test.txt"):
            session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt")
        else:
            session.install("-r", "requirements-test.txt")
        with open("requirements-test.txt") as rtfile:
            packages += rtfile.read()

    if INSTALL_LIBRARY_FROM_SOURCE:
        session.install("-e", _get_repo_root())

    if post_install:
        post_install(session)

    # Enable parallel test execution when a supported plugin is installed.
    if "pytest-parallel" in packages:
        concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto'])
    elif "pytest-xdist" in packages:
        concurrent_args.extend(['-n', 'auto'])

    session.run(
        "pytest",
        *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args),
        # Pytest will return 5 when no tests are collected. This can happen
        # on travis where slow and flaky tests are excluded.
        # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
        success_codes=[0, 5],
        env=get_pytest_env_vars(),
    )
def update_lockfiles(session: nox.sessions.Session):
    """
    Re-resolve env specs and store as lockfiles (``requirements/nox.lock/``).

    Original Conda environment specifications are at:
    ``requirements/py**.yml``. The output lock files denote the dependencies
    that iris-esmf-regrid is tested against, and therefore officially
    supports.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.

    """
    session.install("conda-lock")

    for req_file in Path("requirements").glob(r"py[0-9]*.yml"):
        python_string = req_file.stem

        # Generate the appropriate conda-lock template name, keeping the
        # {platform} placeholder to support conda-lock's internals.
        filename_template = _lockfile_path(python_string, platform_placeholder=True)
        lockfile_path = _lockfile_path(python_string, platform_placeholder=False)
        # Create the parent directory if it doesn't already exist.
        # (exist_ok replaces the previous try/except FileExistsError.)
        filename_template.parent.mkdir(exist_ok=True)

        # Use a copy of the requirements file in a tmp dir - the file will
        # be modified if installing a custom Iris checkout.
        tmp_dir = Path(session.create_tmp())
        req_file_local = tmp_dir / req_file.name
        shutil.copy(req_file, req_file_local)

        conda_lock_cmd = [
            "conda-lock",
            "lock",
            f"--filename-template={filename_template}",
            f"--file={req_file_local}",
            f"--platform={LOCKFILE_PLATFORM}",
        ]

        # Get the requirements for Iris too, if an Iris checkout is specified.
        iris_artifact = _get_iris_github_artifact(session)
        if iris_artifact:
            # Remove ``iris`` from dependencies, if present.
            with req_file_local.open("r+") as file:
                reqs = yaml.load(file, Loader=yaml.FullLoader)
                reqs["dependencies"] = [
                    spec
                    for spec in reqs["dependencies"]
                    if not spec.startswith("iris")
                ]
                # BUG FIX: rewind and truncate before dumping. Without this
                # the new YAML was appended after the original content
                # (the read left the file position at EOF), producing a
                # corrupt requirements file.
                file.seek(0)
                file.truncate()
                yaml.dump(reqs, file)

            # Fetch Iris' own requirements for the matching Python version.
            iris_req_name = f"{python_string}.yml"
            iris_req_url = (f"https://raw.githubusercontent.com/SciTools/iris/"
                            f"{iris_artifact}/requirements/ci/{iris_req_name}")
            iris_req_file = (tmp_dir / iris_req_name).with_stem(f"{python_string}-iris")
            iris_req = urlopen(iris_req_url).read()
            with iris_req_file.open("wb") as file:
                file.write(iris_req)
            # Conda-lock can resolve multiple requirements files together.
            conda_lock_cmd.append(f"--file={iris_req_file}")

        session.run(*conda_lock_cmd, silent=True)
        print(f"Conda lock file created: {lockfile_path}")
def blacken(session: nox.sessions.Session) -> None:
    """Run black. Format code to uniform standard."""
    session.install(BLACK_VERSION)
    # Only the .py files in the current directory are formatted.
    targets = [entry for entry in os.listdir(".") if entry.endswith(".py")]
    session.run("black", *targets)
def blacken(session: nox.sessions.Session) -> None:
    """Format the .py files in the current directory with black."""
    session.install(BLACK_VERSION)
    targets = [entry for entry in os.listdir(".") if entry.endswith(".py")]
    session.run("black", *targets)
def lint(session: nox.sessions.Session) -> None:
    """Lint the package files with flake8 and verify black formatting."""
    # Tool versions are pinned for reproducible results.
    # NOTE(review): mypy is installed here but never run by this session —
    # confirm whether that is intentional (a separate typecheck session runs it).
    session.install("flake8==3.7.9", "black==19.10b0", "mypy==0.770")
    session.run("flake8", *PACKAGE_FILES)
    session.run("black", "--check", *PACKAGE_FILES)
def format(session: nox.sessions.Session) -> None:
    """Rewrite the package files in place: isort first, then black (pinned)."""
    session.install("black==19.10b0", "isort==4.3.21")
    for tool_args in (["isort", "--recursive"], ["black"]):
        session.run(*tool_args, *PACKAGE_FILES)
def typecheck(session: nox.sessions.Session) -> None:
    """Static type-check the package files with mypy."""
    # mypy needs the project's dependencies installed to resolve imports.
    session.install("-r", "requirements.txt")
    session.run("mypy", *PACKAGE_FILES)
def benchmarks(session: nox.sessions.Session, ci_mode: bool, long_mode: bool, gh_pages: bool):
    """
    Perform esmf-regrid performance benchmarks (using Airspeed Velocity).

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.
    ci_mode: bool
        Run a cut-down selection of benchmarks, comparing the current commit to
        the last commit for performance regressions.
    long_mode: bool
        Run the long running benchmarks at the current head of the repo.
    gh_pages: bool
        Run ``asv gh-pages --rewrite`` once finished.

    Notes
    -----
    ASV is set up to use ``nox --session=tests --install-only`` to prepare
    the benchmarking environment.

    """
    session.install("asv", "nox", "pyyaml")
    if "DATA_GEN_PYTHON" in os.environ:
        print("Using existing data generation environment.")
    else:
        print("Setting up the data generation environment...")
        # Build (but don't run) the `tests` session env, then point the
        # benchmarks at its Python interpreter via an env variable.
        session.run("nox", "--session=tests", "--install-only", f"--python={session.python}")
        data_gen_python = next(
            Path(".nox").rglob(
                f"tests*/bin/python{session.python}")).resolve()
        session.env["DATA_GEN_PYTHON"] = data_gen_python
    print("Running ASV...")
    session.cd("benchmarks")
    # Skip over setup questions for a new machine.
    session.run("asv", "machine", "--yes")

    def asv_exec(*sub_args: str) -> None:
        # Run an asv sub-command, appending --python only when the
        # sub-command's --help shows it is supported.
        run_args = ["asv", *sub_args]
        help_output = session.run(*run_args, "--help", silent=True)
        if "--python" in help_output:
            # Not all asv commands accept the --python kwarg.
            run_args.append(f"--python={session.python}")
        session.run(*run_args)

    if ci_mode:
        # If on a PR: compare to the base (target) branch.
        # Else: compare to previous commit.
        previous_commit = os.environ.get("CIRRUS_BASE_SHA", "HEAD^1")
        try:
            asv_exec("continuous", previous_commit, "HEAD", "--bench=ci", "--factor=2")
        finally:
            # Always report the comparison, even if `continuous` errored.
            asv_exec("compare", previous_commit, "HEAD", "--factor=2")
    elif long_mode:
        asv_exec("run", "HEAD^!", "--bench=long")
    else:
        # f32f23a5 = first supporting commit for nox_asv_plugin.py .
        asv_exec("run", "f32f23a5..HEAD")
    if gh_pages:
        asv_exec("gh-pages", "--rewrite")
def benchmarks(
    session: nox.sessions.Session,
    run_type: Literal["overnight", "branch", "custom"],
):
    """
    Perform Iris performance benchmarks (using Airspeed Velocity).

    All run types require a single Nox positional argument (e.g.
    ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters
    section - and can optionally accept a series of further arguments that
    will be added to session's ASV command.

    Parameters
    ----------
    session: object
        A `nox.sessions.Session` object.
    run_type: {"overnight", "branch", "custom"}
        * ``overnight``: benchmarks all commits between the input **first
          commit** to ``HEAD``, comparing each to its parent for performance
          shifts. If a commit causes shifts, the output is saved to a file:
          ``.asv/performance-shifts/<commit-sha>``. Designed for checking the
          previous 24 hours' commits, typically in a scheduled script.
        * ``branch``: Performs the same operations as ``overnight``, but
          always on two commits only - ``HEAD``, and ``HEAD``'s merge-base
          with the input **base branch**. Output from this run is never
          saved to a file. Designed for testing if the active branch's
          changes cause performance shifts - anticipating what would be
          caught by ``overnight`` once merged. **For maximum accuracy, avoid
          using the machine that is running this session. Run time could be
          >1 hour for the full benchmark suite.**
        * ``custom``: run ASV with the input **ASV sub-command**, without
          any preset arguments - must all be supplied by the user. So just
          like running ASV manually, with the convenience of re-using the
          session's scripted setup steps.

    Examples
    --------
    * ``nox --session="benchmarks(overnight)" -- a1b23d4``
    * ``nox --session="benchmarks(branch)" -- upstream/main``
    * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model``
    * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding``
    * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick``

    """
    # The threshold beyond which shifts are 'notable'. See ``asv compare``
    # docs for more.
    COMPARE_FACTOR = 1.2

    session.install("asv", "nox")

    data_gen_var = "DATA_GEN_PYTHON"
    if data_gen_var in os.environ:
        print("Using existing data generation environment.")
    else:
        print("Setting up the data generation environment...")
        # Get Nox to build an environment for the `tests` session, but don't
        # run the session. Will re-use a cached environment if appropriate.
        session.run_always(
            "nox",
            "--session=tests",
            "--install-only",
            f"--python={_PY_VERSION_LATEST}",
        )
        # Find the environment built above, set it to be the data generation
        # environment.
        data_gen_python = next(
            Path(".nox").rglob(
                f"tests*/bin/python{_PY_VERSION_LATEST}")).resolve()
        session.env[data_gen_var] = data_gen_python

        # Mule is needed by the data generation scripts but is not on PyPI:
        # clone it, then pip-install it into the data generation env.
        mule_dir = data_gen_python.parents[1] / "resources" / "mule"
        if not mule_dir.is_dir():
            print("Installing Mule into data generation environment...")
            session.run_always(
                "git",
                "clone",
                "https://github.com/metomi/mule.git",
                str(mule_dir),
                external=True,
            )
            session.run_always(
                str(data_gen_python),
                "-m",
                "pip",
                "install",
                str(mule_dir / "mule"),
                external=True,
            )

    print("Running ASV...")
    session.cd("benchmarks")
    # Skip over setup questions for a new machine.
    session.run("asv", "machine", "--yes")

    # All run types require one Nox posarg.
    run_type_arg = {
        "overnight": "first commit",
        "branch": "base branch",
        "custom": "ASV sub-command",
    }
    if run_type not in run_type_arg.keys():
        message = f"Unsupported run-type: {run_type}"
        raise NotImplementedError(message)
    if not session.posargs:
        message = (f"Missing mandatory first Nox session posarg: "
                   f"{run_type_arg[run_type]}")
        raise ValueError(message)
    first_arg = session.posargs[0]
    # Optional extra arguments to be passed down to ASV.
    asv_args = session.posargs[1:]

    def asv_compare(*commits):
        """Run through a list of commits comparing each one to the next."""
        # ASV identifies commits by their 8-character prefix.
        commits = [commit[:8] for commit in commits]
        shifts_dir = Path(".asv") / "performance-shifts"
        for i in range(len(commits) - 1):
            before = commits[i]
            after = commits[i + 1]
            asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split"
            session.run(*asv_command_.split(" "))

            if run_type == "overnight":
                # Record performance shifts.
                # Run the command again but limited to only showing performance
                # shifts.
                shifts = session.run(*asv_command_.split(" "), "--only-changed", silent=True)
                if shifts:
                    # Write the shifts report to a file.
                    # Dir is used by .github/workflows/benchmarks.yml,
                    # but not cached - intended to be discarded after run.
                    shifts_dir.mkdir(exist_ok=True, parents=True)
                    shifts_path = (shifts_dir / after).with_suffix(".txt")
                    with shifts_path.open("w") as shifts_file:
                        shifts_file.write(shifts)

    # Common ASV arguments used for both `overnight` and `bench` run_types.
    asv_harness = "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict --show-stderr"

    if run_type == "overnight":
        first_commit = first_arg
        # `first_commit^^..` includes first_commit's parent in the run, so
        # first_commit itself can be compared against it.
        commit_range = f"{first_commit}^^.."
        asv_command = asv_harness.format(posargs=commit_range)
        session.run(*asv_command.split(" "), *asv_args)

        # git rev-list --first-parent is the command ASV uses.
        git_command = f"git rev-list --first-parent {commit_range}"
        commit_string = session.run(*git_command.split(" "), silent=True, external=True)
        commit_list = commit_string.rstrip().split("\n")
        # rev-list is newest-first; compare oldest-to-newest pairs.
        asv_compare(*reversed(commit_list))
    elif run_type == "branch":
        base_branch = first_arg
        git_command = f"git merge-base HEAD {base_branch}"
        merge_base = session.run(*git_command.split(" "), silent=True, external=True)[:8]

        # ASV's HASHFILE: syntax benchmarks exactly the listed commits.
        with NamedTemporaryFile("w") as hashfile:
            hashfile.writelines([merge_base, "\n", "HEAD"])
            hashfile.flush()
            commit_range = f"HASHFILE:{hashfile.name}"
            asv_command = asv_harness.format(posargs=commit_range)
            session.run(*asv_command.split(" "), *asv_args)

        asv_compare(merge_base, "HEAD")
    else:
        asv_subcommand = first_arg
        assert run_type == "custom"
        session.run("asv", asv_subcommand, *asv_args)