def upload_packages(args):
    channel = args.channel
    if channel.isalnum():
        # this looks like a channel name, no URL given
        print(f"Posting file to {channel}")
        channel_url = urljoin_all([args.server, "api/channels", channel])
        print("Channel url: ", channel_url)
    else:
        parts = urlparse(channel)
        if parts.path[:4] != "/api":
            parts = parts._replace(path=f"/api{parts.path}")
            channel_url = urlunparse(parts)
        else:
            channel_url = channel

    package_file_names = args.packages

    # Find packages in directory if the single package argument is a directory
    if len(package_file_names) == 1 and os.path.isdir(package_file_names[0]):
        path = package_file_names.pop()
        for root, dirs, files in os.walk(path):
            for file in files:
                if '.json.' not in file and (file.endswith('.tar.bz2')
                                             or file.endswith('.conda')):
                    package_file_names.append(os.path.join(root, file))

    verifier = Verify()

    if args.verify:
        verify_ignore = args.verify_ignore.split(',') if args.verify_ignore else None
        for package in package_file_names:
            verifier.verify_package(
                path_to_package=package,
                checks_to_ignore=verify_ignore,
                exit_on_error=True,
            )

    files = [('files', open(package, 'rb')) for package in package_file_names]
    if args.force:
        files.append(('force', (None, 'true')))

    api_key = get_installed_api_key(channel_url)
    url = f'{channel_url}/files/'

    if args.dry_run:
        package_lines = "\n ".join(package_file_names)
        print(f'QUETZ_API_KEY found: {not not api_key}\n'
              f'URL: {url}\n'
              f'packages:\n {package_lines} ')
    else:
        response = requests.post(url, files=files, headers={'X-API-Key': api_key})
        if response.status_code != 201:
            print('Request failed:\n'
                  f' HTTP status code: {response.status_code}\n'
                  f' Message: {str(response.content.decode("utf-8"))}')
            sys.exit(1)

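# --- Hedged usage sketch (not part of the original module): upload_packages() expects an
# argparse-style namespace carrying the attributes read above (server, channel, packages,
# verify, verify_ignore, force, dry_run). _example_dry_run below is a hypothetical helper
# showing how a dry run could be driven programmatically; the server URL and package path
# are placeholders, not real endpoints.
def _example_dry_run():
    import argparse

    example_args = argparse.Namespace(
        server="https://quetz.example.com",       # placeholder server URL
        channel="mychannel",                      # bare name, so the API URL is derived
        packages=["./dist/mypkg-1.0-0.tar.bz2"],  # placeholder package path
        verify=False,
        verify_ignore=None,
        force=False,
        dry_run=True,                             # print what would be uploaded, do not POST
    )
    upload_packages(example_args)
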
def test_invalid_test_file_path(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_test_file_path')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2123] Found file "../test-data.txt" listed outside recipe directory' in error

def test_invalid_license_family(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_license_family')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2122] Found invalid license family "The Extra License"' in error

def test_invalid_test_files(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_test_files')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2124] Found file "test-data.txt" in meta.yaml that doesn\'t exist' in error

def test_no_package_name(recipe_dir):
    recipe = os.path.join(recipe_dir, 'no_package_name')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2101] Missing package name in meta.yaml' in error

def test_invalid_run_requirement_name(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_run_requirement_name')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2112] Found invalid run requirement "python@#"' in error

def test_invalid_build_number_negative(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_build_number_negative')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2108] Build number in info/index.json cannot be a negative integer' in error

def test_invalid_outputs(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_output')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2110] Found invalid field "srcitp" in section "outputs"' in error

def test_invalid_package_version_prefix(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_package_version_prefix')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2105] Found invalid package version "_1.0.0rc3" in meta.yaml' in error

def test_invalid_package_version_sequence(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_package_version_sequence')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2106] Found invalid sequence "._" in package version' in error

def test_invalid_package_name(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_package_name')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2102] Found invalid package name "some_package." in meta.yaml' in error

def test_invalid_package_field(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_package_field')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2109] Found invalid section "extra_field"' in error

def test_invalid_requirements_field_key(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_requirements_field_key')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2110] Found invalid field "extra_field" in section "requirements"' in error

def test_invalid_multiple_source_field_key(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_multiple_sources')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2110] Found invalid field "gti_url" in section "source"' in error

def test_invalid_about_summary(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_about_summary')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2117] Found summary with length greater than 80 characters' in error

def test_conda_forge_example_recipe(recipe_dir):
    recipe = os.path.join(recipe_dir, 'conda_forge')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2126] Found conda-forge comment in meta.yaml file' in error

def test_invalid_about_url(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_about_url')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2118] Found invalid URL "www.continuum.io" in meta.yaml' in error

def test_invalid_sources(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_sources')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2121] Found both git_branch and git_tag in meta.yaml source field' in error

def test_invalid_source_hash(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_source_hash')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2119] Found invalid hash "abc123" in meta.yaml' in error

def test_invalid_build_requirement_version(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_build_requirement_version')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert '[C2114] Found invalid dependency "setuptools >= 3.4 < 3045" in meta.yaml' in error

def test_duplicate_version_specifications(recipe_dir):
    recipe = os.path.join(recipe_dir, 'duplicate_version_specs')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert "[C2116] Found duplicate run requirements: ['python', 'python']" in error

def test_invalid_dir_content_filesize(recipe_dir):
    recipe = os.path.join(recipe_dir, 'invalid_dir_content_filesize')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert any('[C2125] Found disallowed file with extension' in e for e in error)
    assert any('test.tar.bz2' in e for e in error)

def _submit_verify_package(path, ignore):
    package_issues = (path, None)
    try:
        package_issues = Verify.verify_package(path_to_package=path,
                                               checks_to_ignore=ignore,
                                               exit_on_error=False)
    except (KeyError, OSError) as e:
        package_issues = (path, [str(e)])

    return package_issues

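# --- Hedged sketch (not in the original source): the name _submit_verify_package suggests
# it is meant to be fanned out over an executor so several packages can be verified in
# parallel while per-package KeyError/OSError failures are captured instead of aborting
# the whole run. verify_packages_in_parallel below is a hypothetical driver illustrating
# that pattern with concurrent.futures; it is not part of the project's confirmed API.
def verify_packages_in_parallel(paths, ignore=None, max_workers=4):
    from concurrent.futures import ProcessPoolExecutor

    results = []
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(_submit_verify_package, path, ignore) for path in paths]
        for future in futures:
            # Each result is a (path, issues) tuple; issues is None when nothing was found.
            results.append(future.result())
    return results
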
def test_duplicate_build_requirements(recipe_dir):
    recipe = os.path.join(recipe_dir, 'duplicate_build_requirements')
    metadata = utilities.render_metadata(recipe, None)

    with pytest.raises(RecipeError):
        Verify.verify_recipe(rendered_meta=metadata, recipe_dir=recipe,
                             exit_on_error=True)

    package, error = Verify.verify_recipe(rendered_meta=metadata,
                                          recipe_dir=recipe,
                                          exit_on_error=False)

    assert ("[C2115] Found duplicate build requirements: ['python', 'python']" in error or
            "[C2115] Found duplicate build requirements: ['python 3.6.*', 'python 3.6.*']"
            in error)

def cli(path, ignore, exit):
    """conda-verify is a tool for validating conda packages and recipes.

    To validate a package:\n
    $ conda-verify path/to/package.tar.bz2

    To validate a recipe:\n
    $ conda-verify path/to/recipe_directory/
    """
    verifier = Verify()

    if ignore:
        ignore = ignore.split(',')

    meta_file = os.path.join(path, 'meta.yaml')
    if os.path.isfile(meta_file):
        print('Verifying {}...'.format(meta_file))
        for cfg in iter_cfgs():
            meta = render_metadata(path, cfg)
            if meta.get('build', {}).get('skip', '').lower() != 'true':
                verifier.verify_recipe(rendered_meta=meta, recipe_dir=path,
                                       checks_to_ignore=ignore,
                                       exit_on_error=exit)

    elif path.endswith(('.tar.bz2', '.tar')):
        print('Verifying {}...'.format(path))
        verifier.verify_package(path_to_package=path,
                                checks_to_ignore=ignore,
                                exit_on_error=exit)

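# --- Hedged sketch (not in the original source): the docstring of cli() above reads like a
# command-line help screen, so the function is presumably wired up by a CLI framework in the
# original file. The decorator stack below shows one way this could look with click; the
# option names, defaults, and wrapper name are assumptions, not the project's confirmed
# interface.
import click


@click.command(help=cli.__doc__)
@click.argument('path', type=click.Path(exists=True))
@click.option('--ignore', default=None,
              help='Comma-separated list of checks to skip, e.g. "C2122,C2123".')
@click.option('--exit/--no-exit', 'exit', default=False,
              help='Exit with a non-zero status on the first error.')
def conda_verify_command(path, ignore, exit):
    # Thin wrapper that hands the parsed options to cli() above.
    cli(path, ignore, exit)
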
def main():
    p = OptionParser(
        usage="usage: %prog [options] <path to recipes or packages>",
        description="tool for (passively) verifying conda recipes and conda "
                    "packages for the Anaconda distribution")

    p.add_option('-e', "--exit",
                 help="on error exit",
                 action="store_true")

    p.add_option('-p', "--pedantic",
                 action="store_true")

    p.add_option('-q', "--quiet",
                 action="store_true")

    p.add_option('-V', '--version',
                 help="display the version being used and exit",
                 action="store_true")

    opts, args = p.parse_args()
    verbose = not opts.quiet

    if opts.version:
        from conda_verify import __version__
        print('conda-verify version:', __version__)
        return

    verifier = Verify()

    for path in args:
        if isfile(join(path, 'meta.yaml')):
            if verbose:
                print("==> %s <==" % path)
            for cfg in iter_cfgs():
                meta = render_metadata(path, cfg)
                try:
                    verifier.verify_recipe(pedantic=opts.pedantic, rendered_meta=meta,
                                           recipe_dir=path)
                except RecipeError as e:
                    sys.stderr.write("RecipeError: %s\n" % e)
                    if opts.exit:
                        sys.exit(1)

        elif path.endswith('.tar.bz2'):
            if verbose:
                print("==> %s <==" % path)
            try:
                verifier.verify_package(pedantic=opts.pedantic, path_to_package=path,
                                        verbose=verbose)
            except PackageError as e:
                sys.stderr.write("PackageError: %s\n" % e)
                if opts.exit:
                    sys.exit(1)
        else:
            if verbose:
                print("Ignoring: %s" % path)

def verifier():
    package_verifier = Verify()
    return package_verifier

def main():
    parser = argparse.ArgumentParser(
        usage="%(prog)s url package",
        description="Uploads package to Quetz.")

    parser.add_argument(
        "--dry-run",
        action='store_true',
        help="Print what would happen, without uploading the package(s)")
    parser.add_argument(
        "--verify",
        action='store_true',
        help="Verify package(s) with conda-verify")
    parser.add_argument(
        "--verify-ignore",
        type=str,
        help="Ignore specific checks. Each check must be separated by a single comma")
    parser.add_argument(
        "--force",
        action='store_true',
        help="Allow overwriting an existing package version. "
             "(Only allowed with channel owner role)")
    parser.add_argument(
        "-v", "--version",
        action="version",
        version=f"quetz-client version {quetz_client.__version__}")
    parser.add_argument("channel_url")
    parser.add_argument("packages", nargs='+', help="package(s) or build root")

    args = parser.parse_args()

    channel_url = args.channel_url
    parts = urlparse(channel_url)
    if parts.path[:4] != "/api":
        parts = parts._replace(path=f"/api{parts.path}")
        channel_url = urlunparse(parts)

    package_file_names = args.packages

    # Find packages in directory if the single package argument is a directory
    if len(package_file_names) == 1 and os.path.isdir(package_file_names[0]):
        path = package_file_names.pop()
        for root, dirs, files in os.walk(path):
            for file in files:
                if '.json.' not in file and (file.endswith('.tar.bz2')
                                             or file.endswith('.conda')):
                    package_file_names.append(os.path.join(root, file))

    verifier = Verify()

    if args.verify:
        verify_ignore = args.verify_ignore.split(',') if args.verify_ignore else None
        for package in package_file_names:
            verifier.verify_package(
                path_to_package=package,
                checks_to_ignore=verify_ignore,
                exit_on_error=True,
            )

    files = [('files', open(package, 'rb')) for package in package_file_names]
    if args.force:
        files.append(('force', (None, 'true')))

    api_key = os.getenv('QUETZ_API_KEY')
    url = f'{channel_url}/files/'

    if args.dry_run:
        package_lines = "\n ".join(package_file_names)
        print(f'QUETZ_API_KEY found: {not not api_key}\n'
              f'URL: {url}\n'
              f'packages:\n {package_lines} ')
    else:
        response = requests.post(url, files=files, headers={'X-API-Key': api_key})
        if response.status_code != 201:
            print('Request failed:\n'
                  f' HTTP status code: {response.status_code}\n'
                  f' Message: {str(response.content.decode("utf-8"))}')
            sys.exit(1)

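# --- Hedged usage sketch (not in the original source): main() above parses sys.argv, so a
# quick way to exercise its dry-run path from a test or a REPL is to patch argv before
# calling it. The channel URL and package path below are placeholders.
def _example_main_dry_run():
    import sys
    from unittest import mock

    argv = ["quetz-client", "--dry-run",
            "https://quetz.example.com/channels/mychannel",  # placeholder channel URL
            "./dist/mypkg-1.0-0.tar.bz2"]                    # placeholder package path
    with mock.patch.object(sys, "argv", argv):
        main()
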
def verifier():
    recipe_verifier = Verify()
    return recipe_verifier

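# --- Hedged note (not in the original source): the two verifier() helpers above return a
# fresh Verify() instance and read like pytest fixtures from a conftest.py, presumably
# decorated with @pytest.fixture in the original files. The registration below is only a
# sketch of that assumption; the fixture name and wrapper are hypothetical.
import pytest


@pytest.fixture(name="verifier")
def _verifier_fixture():
    return Verify()
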
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(" (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                        config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                             "but you have not listed it as a build dependency. Doing "
                             "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                     " does not yet support Python 3. Please handle all of "
                                     "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed. Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        # get_dir here might be just work, or it might be one level deeper,
        # depending on the source.
        src_dir = config.work_dir
        if isdir(src_dir):
            print("source tree in:", src_dir)
        else:
            print("no source - creating empty work folder")
            os.makedirs(src_dir)

        rm_rf(config.info_dir)
        files1 = prefix_files(prefix=config.build_prefix)
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                log.warn("Glob %s from always_include_files does not match any files", pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        # Use script from recipe?
        script = m.get_value('build/script', None)
        if script:
            if isinstance(script, list):
                script = '\n'.join(script)

        if isdir(src_dir):
            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(src_dir, 'bld.bat')
                    with open(build_file, 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, config=config)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    with path_prepended(config.build_prefix):
                        env = environ.get_dict(config=config, m=m)
                    env["CONDA_BUILD_STATE"] = "BUILD"
                    work_file = join(config.work_dir, 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if config.activate:
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write('source "{conda_root}activate" "{build_prefix}" &> '
                                     '/dev/null\n'.format(conda_root=root_script_dir + os.path.sep,
                                                          build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            copy_into(build_file, work_file, config.timeout)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]
                        # this should raise if any problems occur while building
                        _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)

        if not is_noarch_python(m):
            create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')),
                     noarch=m.get_value('build/noarch'),
                     skip_compile_pyc=m.get_value('build/skip_compile_pyc'))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1
                                if config.meta_dir in join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script. Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))

        post_build(m, sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)

        entry_point_script_names = get_entry_point_script_names(m.get_value('build/entry_points'))
        if is_noarch_python(m):
            pkg_files = [fi for fi in sorted(files2 - files1)
                         if fi not in entry_point_script_names]
        else:
            pkg_files = sorted(files2 - files1)

        create_info_files(m, pkg_files, config=config, prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)
        elif is_noarch_python(m):
            noarch_python.populate_files(m, pkg_files, config.build_prefix,
                                         entry_point_script_names)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config, could_be_mirror=False)

        if not getattr(config, "noverify", False):
            verifier = Verify()
            ignore_scripts = context.ignore_package_verify_scripts if \
                context.ignore_package_verify_scripts else None
            run_scripts = context.run_package_verify_scripts if \
                context.run_package_verify_scripts else None
            verifier.verify_package(ignore_scripts=ignore_scripts, run_scripts=run_scripts,
                                    path_to_package=path)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True

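# --- Hedged sketch (not in the original source): the order() key used when writing the
# tarball above sorts info/* entries first and then the remaining files by increasing size,
# so small metadata can be read without decompressing large payloads. The standalone helper
# below reproduces that ordering on in-memory (path, size) pairs to make the rule explicit;
# it is illustrative only and does not touch the filesystem.
def _example_tar_order(entries):
    # entries: iterable of (relative_path, size_in_bytes) tuples
    def key(entry):
        path, size = entry
        info_order = int(os.path.dirname(path) != 'info')  # info/* sorts first (0 before 1)
        return info_order, size or 100000                  # empty files sort last within a group
    return sorted(entries, key=key)


# Example: info/index.json comes first, then bin/tool (small) before lib/libbig.so (large):
# _example_tar_order([("lib/libbig.so", 5_000_000), ("info/index.json", 200), ("bin/tool", 900)])
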