def bump(force, spec):
    """Bump both the Python package and the JS packages to ``spec``.

    ``spec`` may be an explicit version or ``"next"``, which advances the
    current micro (or prerelease number) automatically.
    """
    # Refuse to proceed unless the working tree is pristine.
    if run("git status --porcelain").strip():
        raise Exception("Must be in a clean git state with no untracked files")

    current = parse_version(get_version())
    if spec == 'next':
        # Derive the next version from the current one: bump the
        # prerelease number if we are on a prerelease, else the micro.
        base = f"{current.major}.{current.minor}."
        if current.pre:
            kind, number = current.pre
            spec = base + f"{current.micro}{kind}{number + 1}"
        else:
            spec = base + f"{current.micro + 1}"

    version = parse_version(spec)

    # bump the Python package
    run(f"{TBUMP_CMD} {version}")

    # convert the Python version to semver for JS, e.g. 1.2.3a0 -> 1.2.3-alpha.0
    js_version = f"{version.major}.{version.minor}.{version.micro}"
    if version.pre:
        kind, number = version.pre
        kind = kind.replace("a", "alpha").replace("b", "beta")
        js_version += f"-{kind}.{number}"

    # bump the JS packages
    lerna_cmd = f"{LERNA_CMD} {js_version}"
    if force:
        lerna_cmd += " --yes"
    run(lerna_cmd)
def test_tag_release(py_package, runner, build_mock, git_prep):
    """End-to-end: bump the version, build the dists, then tag the release."""
    # Bump the version
    runner(["bump-version", "--version-spec", VERSION_SPEC])
    # Create the dist files
    util.run("python -m build .", cwd=util.CHECKOUT_NAME)
    # Tag the release
    runner(["tag-release"])
def test_build_changelog_existing(py_package, mocker, runner):
    """Rebuilding the changelog must preserve hand edits outside the entry."""
    changelog_file = "CHANGELOG.md"
    changelog_path = Path(util.CHECKOUT_NAME) / changelog_file

    runner(["prep-git", "--git-url", py_package])
    runner(["bump-version", "--version-spec", VERSION_SPEC])

    mocked_gen = mocker.patch("jupyter_releaser.changelog.generate_activity_md")
    mocked_gen.return_value = CHANGELOG_ENTRY
    runner(["build-changelog", "--changelog-path", changelog_file])

    # Hand-edit the generated entry so we can verify the rebuild keeps it.
    text = changelog_path.read_text(encoding="utf-8")
    text = text.replace("defining contributions", "Definining contributions")
    changelog_path.write_text(text, encoding="utf-8")

    # Commit the change
    run('git commit -a -m "commit changelog"', cwd=util.CHECKOUT_NAME)

    mocked_gen.return_value = CHANGELOG_ENTRY
    runner(["build-changelog", "--changelog-path", changelog_file])

    text = changelog_path.read_text(encoding="utf-8")
    assert "Definining contributions" in text, text
    assert "defining contributions" not in text, text
    # The insert markers must appear exactly once each.
    assert len(re.findall(changelog.START_MARKER, text)) == 1
    assert len(re.findall(changelog.END_MARKER, text)) == 1

    run("pre-commit run -a", cwd=util.CHECKOUT_NAME)
def test_build_changelog_backport(py_package, runner, mocker, open_mock):
    """A meeseeksmachine backport PR should be attributed to the original PR."""
    changelog_file = "CHANGELOG.md"
    changelog_path = Path(util.CHECKOUT_NAME) / changelog_file

    # The GitHub API lookup of the original PR returns its title/url/author.
    data = dict(title="foo", html_url="bar", user=dict(login="******", html_url="baz"))
    open_mock.return_value = MockHTTPResponse(data)

    runner(["prep-git", "--git-url", py_package])
    runner(["bump-version", "--version-spec", VERSION_SPEC])

    # Make the generated activity look like a backport entry.
    entry = CHANGELOG_ENTRY.replace("consideRatio", "meeseeksmachine")
    entry = entry.replace(
        "Support git references etc.", "Backport PR #50 (original title"
    )

    mocked_gen = mocker.patch("jupyter_releaser.changelog.generate_activity_md")
    mocked_gen.return_value = entry
    runner(["build-changelog", "--changelog-path", changelog_file])

    text = changelog_path.read_text(encoding="utf-8")
    assert changelog.START_MARKER in text
    assert changelog.END_MARKER in text
    assert "- foo [#50](bar) ([@snuffy](baz))" in text, text
    assert len(re.findall(changelog.START_MARKER, text)) == 1
    assert len(re.findall(changelog.END_MARKER, text)) == 1

    # Fix: run the hooks inside the checkout that was actually modified
    # (the changelog lives in CHECKOUT_NAME), matching the sibling
    # changelog tests which pass cwd=util.CHECKOUT_NAME.
    run("pre-commit run -a", cwd=util.CHECKOUT_NAME)
def create_python_package(git_repo):
    """Populate *git_repo* with a minimal Python package and commit it."""
    # Write each fixture file from its template.
    templates = {
        "setup.py": SETUP_PY_TEMPLATE,
        "setup.cfg": SETUP_CFG_TEMPLATE,
        "tbump.toml": TBUMP_BASE_TEMPLATE + TBUMP_PY_TEMPLATE,
        "pyproject.toml": PYPROJECT_TEMPLATE,
        "foo.py": PY_MODULE_TEMPLATE,
        "MANIFEST.in": MANIFEST_TEMPLATE,
    }
    for fname, content in templates.items():
        (git_repo / fname).write_text(content, encoding="utf-8")

    # Reuse this repository's own pre-commit config for the fixture package.
    here = Path(__file__).parent
    text = here.parent.parent.joinpath(".pre-commit-config.yaml").read_text(
        encoding="utf-8"
    )
    (git_repo / ".pre-commit-config.yaml").write_text(text, encoding="utf-8")

    run("git add .")
    run('git commit -m "initial python package"')

    # Sync the secondary branch with the default one.
    run("git checkout foo")
    run("git pull origin bar")
    run("git checkout bar")

    return git_repo
def build_entry(branch, repo, auth, changelog_path, resolve_backports):
    """Build a python version entry"""
    repo = repo or util.get_repo()
    branch = branch or util.get_branch()

    # The version was already bumped; use it for the new entry.
    version = util.get_version()

    # Validate the existing changelog before touching it.
    changelog = Path(changelog_path).read_text(encoding="utf-8")
    if START_MARKER not in changelog or END_MARKER not in changelog:
        raise ValueError("Missing insert marker for changelog")
    if changelog.find(START_MARKER) != changelog.rfind(START_MARKER):
        raise ValueError("Insert marker appears more than once in changelog")

    # Generate the new entry, splice it in, and write the file back.
    entry = get_version_entry(
        f"origin/{branch}",
        repo,
        version,
        auth=auth,
        resolve_backports=resolve_backports,
    )
    changelog = insert_entry(changelog, entry, version=version)
    Path(changelog_path).write_text(changelog, encoding="utf-8")

    # Stage changelog
    util.run(f"git add {util.normalize_path(changelog_path)}")
def check_links(ignore_glob, ignore_links, cache_file, links_expire):
    """Check URLs for HTML-containing files."""
    # Normalize the cache location to forward slashes for the CLI flag.
    cache_dir = osp.expanduser(cache_file).replace(os.sep, "/")
    os.makedirs(cache_dir, exist_ok=True)

    cmd = "pytest --noconftest --check-links --check-links-cache "
    cmd += f"--check-links-cache-expire-after {links_expire} "
    cmd += f"--check-links-cache-name {cache_dir}/check-release-links "

    # Collect the files matched by the ignore globs so we can filter below.
    ignored = []
    for pattern in ignore_glob:
        cmd += f" --ignore-glob {pattern}"
        ignored.extend(glob(pattern, recursive=True))

    for pattern in ignore_links:
        cmd += f" --check-links-ignore {pattern}"

    cmd += " --ignore node_modules"

    # Gather all of the markdown, RST, and ipynb files
    files = []
    for ext in [".md", ".rst", ".ipynb"]:
        files.extend(
            m for m in glob(f"**/*{ext}", recursive=True) if m not in ignored
        )

    cmd += " " + " ".join(files)

    try:
        util.run(cmd)
    except Exception:
        # Retry only the failures from the first run.
        util.run(cmd + " --lf")
def test_create_release_commit(py_package, build_mock):
    """The release commit should record shas for both dist artifacts."""
    util.bump_version("0.0.2a0")
    version = util.get_version()
    util.run("python -m build .")

    shas = util.create_release_commit(version)

    for artifact in (
        "dist/foo-0.0.2a0.tar.gz",
        "dist/foo-0.0.2a0-py3-none-any.whl",
    ):
        assert util.normalize_path(artifact) in shas
def check_dist(dist_file, test_cmd=""):
    """Check a Python package locally (not as a cli)

    Runs ``twine check`` on the dist file, then installs it in a throwaway
    virtualenv and runs ``test_cmd`` (default: import the package).
    """
    dist_file = util.normalize_path(dist_file)
    util.run(f"twine check {dist_file}")

    if not test_cmd:
        # Get the package name from the dist file name
        name = re.match(r"(\S+)-\d", osp.basename(dist_file)).groups()[0]
        name = name.replace("-", "_")
        test_cmd = f'python -c "import {name}"'

    # Create venvs to install dist file
    # run the test command in the venv
    with TemporaryDirectory() as td:
        env_path = util.normalize_path(osp.abspath(td))
        if os.name == "nt":  # pragma: no cover
            # Fix: no trailing slash -- the commands below add their own
            # separator, so the old value produced "Scripts//python".
            bin_path = f"{env_path}/Scripts"
        else:
            bin_path = f"{env_path}/bin"

        # Create the virtual env, upgrade pip,
        # install, and run test command
        util.run(f"python -m venv {env_path}")
        util.run(f"{bin_path}/python -m pip install -U pip")
        util.run(f"{bin_path}/pip install -q {dist_file}")
        util.run(f"{bin_path}/{test_cmd}")
def test_check_links(py_package, runner):
    # Add a live link to the README so check-links has something to verify.
    readme = Path("README.md")
    readme.write_text(
        readme.read_text(encoding="utf-8")
        + "\nhttps://apod.nasa.gov/apod/astropix.html",
        encoding="utf-8",
    )

    # Configure an ignore glob through pyproject metadata.
    pyproject = util.toml.loads(util.PYPROJECT.read_text(encoding="utf-8"))
    pyproject["tool"] = {"jupyter-releaser": dict()}
    pyproject["tool"]["jupyter-releaser"]["options"] = {"ignore-glob": ["FOO.md"]}
    util.PYPROJECT.write_text(util.toml.dumps(pyproject), encoding="utf-8")

    util.run("git commit -a -m 'update files'")
    runner(["prep-git", "--git-url", py_package])
    runner(["check-links"])

    # An ignored file with a dead link, and a file with a space in its
    # name, must not break the check.
    (Path(util.CHECKOUT_NAME) / "FOO.md").write_text("http://127.0.0.1:5555")
    (Path(util.CHECKOUT_NAME) / "BAR BAZ.md").write_text("")
    runner(["check-links"])
def check_manifest():
    """Check the project manifest"""
    # Only meaningful for Python packages; guard-clause out otherwise.
    if not (util.PYPROJECT.exists() or util.SETUP_PY.exists()):
        util.log("Skipping check-manifest since there are no python package files")
        return
    util.run("check-manifest -v")
def draft_changelog(version_spec, branch, repo, auth, dry_run):
    """Create a changelog entry PR"""
    repo = repo or util.get_repo()
    branch = branch or util.get_branch()
    version = util.get_version()

    # Bail early if the release tag already exists.
    tags = util.run("git --no-pager tag")
    if f"v{version}" in tags.splitlines():
        raise ValueError(f"Tag v{version} already exists")

    # Check out any unstaged files from version bump
    util.run("git checkout -- .")

    title = f"{changelog.PR_PREFIX} for {version} on {branch}"
    commit_message = f'git commit -a -m "{title}"'
    body = title

    # Check for multiple versions
    if util.PACKAGE_JSON.exists():
        body += npm.get_package_versions(version)

    # Describe how to trigger the follow-up workflow in the PR body.
    body += '\n\nAfter merging this PR run the "Draft Release" Workflow with the following inputs'
    body += f"""
| Input | Value |
| ------------- | ------------- |
| Target | {repo} |
| Branch | {branch} |
| Version Spec | {version_spec} |
"""
    util.log(body)

    make_changelog_pr(auth, branch, repo, title, commit_message, body, dry_run=dry_run)
def py_dist(py_package, runner, mocker, build_mock, git_prep):
    """Fixture: build the dist files for the python package and tag it."""
    testutil.mock_changelog_entry(py_package, runner, mocker)

    # Create the dist files
    util.run("python -m build .", cwd=util.CHECKOUT_NAME)

    # Finalize the release
    runner(["tag-release"])

    return py_package
def test_forwardport_changelog_no_new(npm_package, runner, mocker, open_mock, git_prep):
    """Forwardporting with no new entries should still leave a clean changelog."""
    open_mock.side_effect = [MockHTTPResponse([REPO_DATA]), MockHTTPResponse()]

    # Create a branch with a changelog entry
    util.run("git checkout -b backport_branch", cwd=util.CHECKOUT_NAME)
    util.run("git push origin backport_branch", cwd=util.CHECKOUT_NAME)
    mock_changelog_entry(npm_package, runner, mocker)
    util.run('git commit -a -m "Add changelog entry"', cwd=util.CHECKOUT_NAME)
    util.run(f"git tag v{VERSION_SPEC}", cwd=util.CHECKOUT_NAME)

    # Run the forwardport workflow against default branch
    os.chdir(util.CHECKOUT_NAME)
    runner(["forwardport-changelog", HTML_URL, "--git-url", os.getcwd()])

    # Only the repo lookup should have hit the API.
    assert len(open_mock.mock_calls) == 1

    expected = """
<!-- <START NEW CHANGELOG ENTRY> -->

## 1.0.1
"""
    assert expected in Path("CHANGELOG.md").read_text(encoding="utf-8")
def test_check_links(py_package, runner):
    # Give check-links a known-good URL to verify.
    readme = Path("README.md")
    readme.write_text(
        readme.read_text(encoding="utf-8")
        + "\nhttps://apod.nasa.gov/apod/astropix.html",
        encoding="utf-8",
    )
    util.run("git commit -a -m 'update readme'")

    runner(["prep-git", "--git-url", py_package])
    runner(["check-links"])

    # A dead link passes only because the file is ignored via the CLI flag.
    (Path(util.CHECKOUT_NAME) / "FOO.md").write_text("http://127.0.0.1:5555")
    runner(["check-links", "--ignore-glob", "FOO.md"])
def tag_release(dist_dir, no_git_tag_workspace):
    """Create release commit and tag"""
    # The version was already bumped; tag what is currently checked out.
    version = util.get_version()
    util.create_release_commit(version, dist_dir)

    # Annotated tag for the top-level release.
    tag = f"v{version}"
    util.run(f'git tag {tag} -a -m "Release {tag}"')

    # Optionally tag each npm workspace package as well.
    if not no_git_tag_workspace:
        npm.tag_workspace_packages()
def build_dist(package, dist_dir):
    """Build npm dist file(s) from a package

    ``package`` is either a directory (possibly a workspace root) or a
    pre-built tarball path.  Public tarballs are moved into ``dist_dir``;
    for a workspace root every public sub-package is packed as well.
    """
    # Clean the dist folder of existing npm tarballs
    os.makedirs(dist_dir, exist_ok=True)
    dest = Path(dist_dir)
    for pkg in glob(f"{dist_dir}/*.tgz"):
        os.remove(pkg)

    if osp.isdir(package):
        basedir = package
        # `npm pack` prints the tarball file name on its last output line.
        tarball = osp.join(package, util.run("npm pack", cwd=package).split("\n")[-1])
    else:
        basedir = osp.dirname(package)
        tarball = package

    data = extract_package(tarball)

    # Move the tarball into the dist folder if public.
    # (npm defines "private" as a boolean, so truthiness replaces the
    # previous non-idiomatic `== True` comparison.)
    if not data.get("private", False):
        shutil.move(str(tarball), str(dest))
    elif osp.isdir(package):
        os.remove(tarball)

    # Only a directory can be a workspace root.
    if not osp.isdir(package):
        return
    if "workspaces" not in data:
        return

    # Collect the non-private workspace packages keyed by name.
    all_data = dict()
    for pattern in _get_workspace_packages(data):
        for path in glob(osp.join(basedir, pattern), recursive=True):
            path = Path(path)
            package_json = path / "package.json"
            if not osp.exists(package_json):
                continue
            data = json.loads(package_json.read_text(encoding="utf-8"))
            if data.get("private", False):
                continue
            data["__path__"] = path
            all_data[data["name"]] = data

    # Pack each workspace package into the dist folder.
    for i, (name, data) in enumerate(sorted(all_data.items()), start=1):
        path = data["__path__"]
        util.log(f'({i}/{len(all_data)}) Packing {data["name"]}...')
        tarball = path / util.run("npm pack", cwd=path, quiet=True)
        shutil.move(str(tarball), str(dest))
def test_extract_dist_npm(npm_dist, runner, mocker, open_mock, tmp_path):
    # Stage the built dist files outside the checkout.
    os.makedirs("staging")
    shutil.move(f"{util.CHECKOUT_NAME}/dist", "staging")

    def helper(path, **kwargs):
        return MockRequestResponse(f"staging/dist/{path}")

    get_mock = mocker.patch("requests.get", side_effect=helper)

    dist_names = [osp.basename(f) for f in glob("staging/dist/*.tgz")]
    url = normalize_path(osp.join(os.getcwd(), util.CHECKOUT_NAME))
    tag_name = f"v{VERSION_SPEC}"

    # Fake the GitHub API: one release carrying all the assets...
    releases = [
        dict(
            tag_name=tag_name,
            target_commitish="main",
            assets=[dict(name=name, url=name) for name in dist_names],
        )
    ]
    # ...plus the matching tag object.
    sha = run("git rev-parse HEAD", cwd=util.CHECKOUT_NAME)
    tags = [dict(ref=f"refs/tags/{tag_name}", object=dict(sha=sha))]

    open_mock.side_effect = [
        MockHTTPResponse(releases),
        MockHTTPResponse(tags),
        MockHTTPResponse(dict(html_url=url)),
    ]

    runner(["extract-release", HTML_URL])
    assert len(open_mock.mock_calls) == 3
    # One download per workspace tarball.
    assert len(get_mock.mock_calls) == len(dist_names) == 3
def check_dist(dist_dir):
    """Check npm dist file(s) in a dist dir

    Installs every extracted tarball into a throwaway npm project to make
    sure the packages are installable together.
    """
    # Fix: the previous code used ``Path(TemporaryDirectory().name)``,
    # dropping the TemporaryDirectory object -- garbage collection could
    # remove the directory at any point mid-check.  Using it as a context
    # manager keeps the directory alive and handles cleanup.
    with TemporaryDirectory() as td:
        tmp_dir = Path(td)
        util.run("npm init -y", cwd=tmp_dir)

        # Unpack the tarballs into a staging area and install them all.
        staging = tmp_dir / "staging"
        names = extract_dist(dist_dir, staging)
        install_str = " ".join(f"./staging/{name}" for name in names)
        util.run(f"npm install {install_str}", cwd=tmp_dir)
def test_create_release_commit_hybrid(py_package, build_mock):
    # Add an npm package and test with that
    util.bump_version("0.0.2a0")
    version = util.get_version()
    testutil.create_npm_package(py_package)

    # Keep the npm version in sync with the python one.
    pkg_json = py_package / "package.json"
    data = json.loads(pkg_json.read_text(encoding="utf-8"))
    data["version"] = version
    pkg_json.write_text(json.dumps(data, indent=4), encoding="utf-8")

    # Teach tbump about the npm package as well.
    tbump_toml = py_package / "tbump.toml"
    tbump_toml.write_text(
        tbump_toml.read_text(encoding="utf-8") + testutil.TBUMP_NPM_TEMPLATE,
        encoding="utf-8",
    )

    util.run("python -m build .")
    shas = util.create_release_commit(version)
    assert len(shas) == 2
    assert util.normalize_path("dist/foo-0.0.2a0.tar.gz") in shas
def publish_assets(dist_dir, npm_token, npm_cmd, twine_cmd, dry_run, use_checkout_dir):
    """Publish release asset(s)"""
    if use_checkout_dir:
        if not osp.exists(util.CHECKOUT_NAME):
            raise ValueError("Please run prep-git first")
        os.chdir(util.CHECKOUT_NAME)

    if dry_run:
        # Start local pypi server with no auth, allowing overwrites,
        # in a temporary directory
        temp_dir = TemporaryDirectory()
        server_cmd = f"pypi-server -p 8081 -P . -a . -o -v {temp_dir.name}"
        proc = Popen(shlex.split(server_cmd), stderr=PIPE)
        # Wait for the server to start
        while True:
            line = proc.stderr.readline().decode("utf-8").strip()
            util.log(line)
            if "Listening on" in line:
                break
        atexit.register(proc.kill)
        atexit.register(temp_dir.cleanup)

        # Point twine and npm at harmless dry-run targets.
        twine_cmd = "twine upload --repository-url=http://localhost:8081"
        os.environ["TWINE_USERNAME"] = "******"
        os.environ["TWINE_PASSWORD"] = "******"
        npm_cmd = "npm publish --dry-run"

    if npm_token:
        npm.handle_auth_token(npm_token)

    # Dispatch each dist file to the matching upload tool.
    published = False
    for asset in map(Path, glob(f"{dist_dir}/*.*")):
        if asset.suffix in [".gz", ".whl"]:
            util.run(f"{twine_cmd} {asset.name}", cwd=dist_dir)
            published = True
        elif asset.suffix == ".tgz":
            util.run(f"{npm_cmd} {asset.name}", cwd=dist_dir)
            published = True
        else:
            util.log(f"Nothing to upload for {asset.name}")

    if not published:  # pragma: no cover
        raise ValueError("No assets published, refusing to finalize release")
def tag_workspace_packages():
    """Generate tags for npm workspace packages"""
    if not PACKAGE_JSON.exists():
        return

    data = json.loads(PACKAGE_JSON.read_text(encoding="utf-8"))
    # Idiom fix (`not ... in` -> `not in`) and hoisted above the git call
    # so we don't shell out when there is nothing to tag.
    if "workspaces" not in data:
        return

    existing = util.run("git tag").splitlines()
    for pattern in _get_workspace_packages(data):
        for path in glob(pattern, recursive=True):
            sub_package_json = Path(path) / "package.json"
            # Robustness: skip matched paths without a package.json,
            # consistent with build_dist.
            if not sub_package_json.exists():
                continue
            sub_data = json.loads(sub_package_json.read_text(encoding="utf-8"))
            tag_name = f"{sub_data['name']}@{sub_data['version']}"
            if tag_name in existing:
                util.log(f"Skipping existing tag {tag_name}")
            else:
                util.run(f"git tag {tag_name}")
def test_extract_dist_py(py_package, runner, mocker, open_mock, tmp_path, git_prep):
    mock_changelog_entry(py_package, runner, mocker)

    # Create the dist files
    run("python -m build .", cwd=util.CHECKOUT_NAME)

    # Finalize the release
    runner(["tag-release"])

    # Stage the dists outside the checkout so the mocked requests.get can
    # serve them back.
    os.makedirs("staging")
    shutil.move(f"{util.CHECKOUT_NAME}/dist", "staging")

    def helper(path, **kwargs):
        return MockRequestResponse(f"staging/dist/{path}")

    get_mock = mocker.patch("requests.get", side_effect=helper)

    tag_name = f"v{VERSION_SPEC}"
    dist_names = [osp.basename(f) for f in glob("staging/dist/*.*")]
    releases = [
        dict(
            tag_name=tag_name,
            target_commitish=util.get_branch(),
            assets=[dict(name=name, url=name) for name in dist_names],
        )
    ]
    sha = run("git rev-parse HEAD", cwd=util.CHECKOUT_NAME)
    tags = [dict(ref=f"refs/tags/{tag_name}", object=dict(sha=sha))]
    url = normalize_path(osp.join(os.getcwd(), util.CHECKOUT_NAME))
    open_mock.side_effect = [
        MockHTTPResponse(releases),
        MockHTTPResponse(tags),
        MockHTTPResponse(dict(html_url=url)),
    ]

    runner(["extract-release", HTML_URL])
    assert len(open_mock.mock_calls) == 3
    # One download per dist file (sdist + wheel).
    assert len(get_mock.mock_calls) == len(dist_names) == 2
def test_publish_assets_py(py_package, runner, mocker, git_prep):
    # Create the dist files
    mock_changelog_entry(py_package, runner, mocker)
    run("python -m build .", cwd=util.CHECKOUT_NAME)

    # Count twine invocations while still executing them for real.
    orig_run = util.run
    called = 0

    def wrapped(cmd, **kwargs):
        nonlocal called
        if cmd.startswith("twine upload"):
            called += 1
        return orig_run(cmd, **kwargs)

    mocker.patch("jupyter_releaser.util.run", wraps=wrapped)

    dist_dir = py_package / util.CHECKOUT_NAME / "dist"
    runner(["publish-assets", "--dist-dir", dist_dir, "--dry-run"])

    # One upload each for the sdist and the wheel.
    assert called == 2, called
def patch(force=False):
    """Make a patch release from the current final version."""
    version = get_version()
    if is_prerelease(version):
        raise Exception("Can only make a patch release from a final version")

    run("bumpversion patch", quiet=True)  # switches to alpha
    # Each "release" bump advances the channel: alpha -> beta -> rc -> final.
    for _ in range(3):
        run("bumpversion release --allow-dirty", quiet=True)

    # Version the changed
    cmd = "jlpm run lerna version patch --no-push --force-publish --no-git-tag-version"
    if force:
        cmd += " --yes"
    run(cmd)
def publish_release(auth, dist_dir, npm_token, npm_cmd, twine_cmd, dry_run, release_url):
    """Publish release asset(s) and finalize GitHub release"""
    # Fix: log message previously read "Publishing ... in with dry run".
    util.log(f"Publishing {release_url} with dry run: {dry_run}")

    match = parse_release_url(release_url)

    if npm_token:
        npm.handle_auth_token(npm_token)

    # Dispatch each dist file to the matching upload tool.
    found = False
    for asset in map(Path, glob(f"{dist_dir}/*.*")):
        if asset.suffix in [".gz", ".whl"]:
            util.run(f"{twine_cmd} {asset.name}", cwd=dist_dir)
            found = True
        elif asset.suffix == ".tgz":
            util.run(f"{npm_cmd} {asset.name}", cwd=dist_dir)
            found = True
        else:
            util.log(f"Nothing to upload for {asset.name}")

    if not found:  # pragma: no cover
        raise ValueError("No assets published, refusing to finalize release")

    # Take the release out of draft
    gh = GhApi(owner=match["owner"], repo=match["repo"], token=auth)
    release = util.release_for_url(gh, release_url)
    release = gh.repos.update_release(
        release.id,
        release.tag_name,
        release.target_commitish,
        release.name,
        release.body,
        dry_run,  # keep it a draft during a dry run
        release.prerelease,
    )

    # Set the GitHub action output
    util.actions_output("release_url", release.html_url)
def workspace_package(npm_package):
    """Fixture: turn *npm_package* into a workspace with three sub-packages."""
    pkg_file = npm_package / "package.json"
    data = json.loads(pkg_file.read_text(encoding="utf-8"))
    data["workspaces"] = dict(packages=["packages/*"])
    data["private"] = True
    pkg_file.write_text(json.dumps(data), encoding="utf-8")

    prev_dir = Path(os.getcwd())
    # foo depends on bar, baz depends on foo; bar has no dependencies.
    deps = {"foo": dict(bar="*"), "baz": dict(foo="*")}
    for name in ["foo", "bar", "baz"]:
        new_dir = prev_dir / "packages" / name
        os.makedirs(new_dir)
        os.chdir(new_dir)
        run("npm init -y")
        (new_dir / "index.js").write_text('console.log("hello")', encoding="utf-8")
        if name in deps:
            pkg_json = new_dir / "package.json"
            sub_data = json.loads(pkg_json.read_text(encoding="utf-8"))
            sub_data["dependencies"] = deps[name]
            pkg_json.write_text(json.dumps(sub_data), encoding="utf-8")

    os.chdir(prev_dir)
    util.run("git add .")
    util.run('git commit -a -m "Add workspaces"')
    return npm_package
def test_build_changelog(py_package, mocker, runner):
    run("pre-commit run -a")

    changelog_path = "CHANGELOG.md"
    runner(["prep-git", "--git-url", py_package])
    runner(["bump-version", "--version-spec", VERSION_SPEC])

    mocked_gen = mocker.patch("jupyter_releaser.changelog.generate_activity_md")
    mocked_gen.return_value = CHANGELOG_ENTRY
    runner(["build-changelog", "--changelog-path", changelog_path])

    text = (Path(util.CHECKOUT_NAME) / "CHANGELOG.md").read_text(encoding="utf-8")
    assert changelog.START_MARKER in text
    assert changelog.END_MARKER in text
    assert PR_ENTRY in text
    # The insert markers must appear exactly once each.
    assert len(re.findall(changelog.START_MARKER, text)) == 1
    assert len(re.findall(changelog.END_MARKER, text)) == 1

    run("pre-commit run -a")
def check_links(ignore_glob, ignore_links, cache_file, links_expire):
    """Check URLs for HTML-containing files."""
    # Normalize the cache location to forward slashes for the CLI flag.
    cache_dir = osp.expanduser(cache_file).replace(os.sep, "/")
    os.makedirs(cache_dir, exist_ok=True)

    cmd = "pytest --noconftest --check-links --check-links-cache "
    cmd += f"--check-links-cache-expire-after {links_expire} "
    cmd += f"--check-links-cache-name {cache_dir}/check-release-links "

    # Collect the files matched by the ignore globs so we can filter below.
    ignored = []
    for pattern in ignore_glob:
        cmd += f' --ignore-glob "{pattern}"'
        ignored.extend(glob(pattern, recursive=True))

    # Always skip GitHub PR/issue/search links and localhost URLs.
    ignore_links = list(ignore_links) + [
        "https://github.com/.*/(pull|issues)/.*",
        "https://github.com/search?",
        "http://localhost.*",
    ]
    for pattern in ignore_links:
        cmd += f' --check-links-ignore "{pattern}"'

    cmd += " --ignore node_modules"

    # Gather all of the markdown, RST, and ipynb files
    files = []
    for ext in [".md", ".rst", ".ipynb"]:
        matched = glob(f"**/*{ext}", recursive=True)
        files.extend(
            m for m in matched if m not in ignored and "node_modules" not in m
        )

    # Check each file separately so one failure does not mask the rest.
    for f in files:
        file_cmd = cmd + f' "{f}"'
        try:
            util.run(file_cmd)
        except Exception as e:
            # Return code 5 means no tests were run (no links found)
            # NOTE(review): assumes the raised exception exposes
            # ``returncode`` (e.g. CalledProcessError) -- confirm.
            if e.returncode != 5:
                util.run(file_cmd + " --lf")
def test_publish_release_py(py_package, runner, mocker, open_mock, git_prep):
    open_mock.side_effect = [MockHTTPResponse([REPO_DATA]), MockHTTPResponse()]

    # Create the dist files
    mock_changelog_entry(py_package, runner, mocker)
    run("python -m build .", cwd=util.CHECKOUT_NAME)

    # Count twine uploads while still running them for real.
    orig_run = util.run
    called = 0

    def wrapped(cmd, **kwargs):
        nonlocal called
        if cmd.startswith("twine upload"):
            called += 1
        return orig_run(cmd, **kwargs)

    mocker.patch("jupyter_releaser.util.run", wraps=wrapped)

    dist_dir = py_package / util.CHECKOUT_NAME / "dist"
    runner(["publish-release", HTML_URL, "--dist-dir", dist_dir, "--dry-run"])

    # NOTE(review): ``call_args`` is an (args, kwargs) pair, so its length
    # is always 2 -- this assertion is vacuous and probably meant
    # ``open_mock.call_count``; confirm the intended count before changing.
    assert len(open_mock.call_args) == 2
    assert called == 2, called