def check_banned_import(files: List[str], *, bad_import_regex: str, correct_import_message: str) -> None:
    """Die with an explanatory message if any of `files` contains a forbidden import.

    `bad_import_regex` selects the offending snippet; `correct_import_message`
    tells the author what to use instead.
    """
    offenders: List[str] = filter_files(files, snippet_regex=bad_import_regex)
    if not offenders:
        return
    offenders_str = "\n".join(offenders)
    die(
        f"Found forbidden imports matching `{bad_import_regex}`. Instead, you should use "
        f"{correct_import_message}. Bad files:\n{offenders_str}"
    )
def getStoreKeyPasswords(filename):
    """Parse the `storepass` and `keypass` values out of a key=value config file.

    Lines are expected to look like:  storepass = "secret"

    Returns a (storepass, keypass) tuple; dies if either entry is missing.
    """
    storepass = None
    keypass = None
    # Bug fix: use a context manager so the file is closed even if parsing
    # raises (the original called close() manually on the success path only).
    with open(filename) as f:
        for line in f:
            # Raw string: the original '([a-z]+)\s*\=\s*...' relied on invalid
            # escape sequences that are a DeprecationWarning/SyntaxWarning.
            m = re.search(r'([a-z]+)\s*=\s*"([^"]+)"', line)
            if m is not None:
                if m.group(1) == "storepass":
                    storepass = m.group(2)
                elif m.group(1) == "keypass":
                    keypass = m.group(2)
    if storepass is None or keypass is None:
        common.die("Could not read signing key passwords")
    return (storepass, keypass)
def main() -> None:
    """Check every configured directory for files missing the expected header."""
    args = create_parser().parse_args()
    header_parse_failures = []
    for directory in args.dirs:
        header_parse_failures.extend(check_dir(directory, args.files_added))
    if not header_parse_failures:
        return
    failures = '\n '.join(str(failure) for failure in header_parse_failures)
    die(f"""\
ERROR: All .py files other than __init__.py should start with the header:

{EXPECTED_HEADER_PY3}

If they must support Python 2 still, they should start with the header:

{EXPECTED_HEADER_PY2}

---

The following {len(header_parse_failures)} file(s) do not conform:
 {failures}""")
def run_shellcheck() -> None:
    """Run shellcheck over every tracked shell script; die on any finding."""
    scripts = set(glob("./**/*.sh", recursive=True)) | {
        "./pants",
        "./pants2",
        "./build-support/pants_venv",
        "./build-support/virtualenv",
        "./build-support/githooks/pre-commit",
        "./build-support/githooks/prepare-commit-msg",
    }
    # Vendored/generated scripts are not ours to lint.
    for excluded_pattern in (
        "./build-support/bin/native/src/**/*.sh",
        "./build-support/virtualenv.dist/**/*.sh",
    ):
        scripts -= set(glob(excluded_pattern, recursive=True))
    command = ["shellcheck", "--shell=bash", "--external-sources"] + sorted(scripts)
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError:
        die("Please fix the above errors and run again.")
    else:
        green("./pants passed the shellcheck!")
def main() -> None:
    """Verify that pants.pex was built with wheels for the expected ABIs only."""
    # Bug fix: `Path.is_file` is a method. Without the call parentheses the
    # attribute is always truthy, so `not ...is_file` was always False and the
    # missing-file error below could never fire.
    if not Path("pants.pex").is_file():
        die("pants.pex not found! Ensure you are in the repository root, then run "
            "'./build-support/bin/ci.sh -b' to bootstrap pants.pex with Python 3 or "
            "'./build-support/bin/ci.sh -2b' to bootstrap pants.pex with Python 2.")
    expected_abis = frozenset(create_parser().parse_args().abis)
    with zipfile.ZipFile("pants.pex", "r") as pex:
        with pex.open("PEX-INFO", "r") as pex_info:
            pex_info_content = pex_info.readline().decode()
    # Collect the ABI tag of every wheel baked into the PEX, ignoring
    # pure-Python ("none") wheels.
    parsed_abis = frozenset(
        parse_abi_from_filename(filename)
        for filename in json.loads(pex_info_content)["distributions"].keys()
        if parse_abi_from_filename(filename) != "none"
    )
    if not parsed_abis.issubset(expected_abis):
        die("pants.pex was built with the incorrect ABI. Expected wheels with: {}, found: {}."
            .format(' or '.join(sorted(expected_abis)), ', '.join(sorted(parsed_abis))))
    green("Success. The pants.pex was built with wheels carrying the expected ABIs: {}."
          .format(', '.join(sorted(parsed_abis))))
def check_prebuilt_wheels(check_dir: str) -> None:
    """Die unless every package has its expected prebuilt wheels under `check_dir`."""
    banner(f"Checking prebuilt wheels for {CONSTANTS.pants_unstable_version}")
    missing_packages = []
    for package in sorted(all_packages()):
        local_files = package.find_locally(
            version=CONSTANTS.pants_unstable_version, search_dir=check_dir
        )
        if not local_files:
            missing_packages.append(package.name)
            continue
        # Cross-platform packages must ship one wheel per (platform, interpreter) pair.
        if is_cross_platform(local_files) and len(local_files) != 6:
            formatted_local_files = ", ".join(f.name for f in local_files)
            missing_packages.append(
                f"{package.name} (expected 6 wheels, {{macosx, linux}} x {{cp36m, cp37m, cp38}}, "
                f"but found {formatted_local_files})"
            )
    if missing_packages:
        formatted_missing = "\n ".join(missing_packages)
        die(f"Failed to find prebuilt wheels:\n {formatted_missing}")
    green(f"All {len(all_packages())} pantsbuild.pants packages were fetched and are valid.")
def run_shellcheck() -> None:
    """Lint all repo shell scripts with shellcheck; die if any finding is reported."""
    scripts = set(glob("./**/*.sh", recursive=True)) | {
        "./cargo",
        "./pants",
        "./build-support/pants_venv",
        "./build-support/virtualenv",
        "./build-support/githooks/pre-commit",
        "./build-support/githooks/prepare-commit-msg",
    }
    # Exclude third-party / generated virtualenv scripts from linting.
    for pattern in (
        "./build-support/virtualenv.dist/**/*.sh",
        "./build-support/virtualenvs/**/*.sh",
        "./build-support/twine-deps.venv/**/*.sh",
    ):
        scripts -= set(glob(pattern, recursive=True))
    command = ["shellcheck", "--shell=bash", "--external-sources"] + sorted(scripts)
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError:
        die("Please fix the above errors and run again.")
def test_json_c(args: argparse.Namespace) -> bool:
    """Download and build json-c under bear, then transpile it; return success."""
    if not os.path.isfile(os.path.join(c.BUILD_DIR, JSON_C_ARCHIVE)):
        with pb.local.cwd(c.BUILD_DIR):
            download_archive(JSON_C_URL, JSON_C_ARCHIVE)
            invoke_quietly(TAR, "xf", JSON_C_ARCHIVE)

    compile_db = os.path.join(JSON_C_SRC, c.CC_DB_JSON)

    # unconditionally compile json-c since we don't know if
    # cc_db was generated from the environment we're in.
    with pb.local.cwd(JSON_C_SRC), pb.local.env(CC="clang"):
        if os.path.isfile('Makefile'):
            invoke(MAKE['clean'])
        configure = pb.local.get("./configure")
        invoke(configure)
        invoke(BEAR[MAKE[JOBS]])

    if not os.path.isfile(compile_db):
        die("missing " + compile_db, errno.ENOENT)

    return transpile(compile_db)
def validate_pants_pkg(version: str, venv_bin_dir: Path, extra_pip_args: list[str]) -> None:
    """Install pantsbuild.pants==version into the venv and smoke-test the install."""

    def run_venv_pants(args: list[str]) -> str:
        # When we do (dry-run) testing, we need to run the packaged pants. It doesn't have
        # internal backend plugins embedded (but it does have all other backends): to load
        # only the internal packages, we override the `--python-path` to include them (and
        # to implicitly exclude `src/python`).
        proc = subprocess.run(
            [
                venv_bin_dir / "pants",
                "--no-remote-cache-read",
                "--no-remote-cache-write",
                "--no-pantsd",
                "--pythonpath=['pants-plugins']",
                *args,
            ],
            check=True,
            stdout=subprocess.PIPE,
            env={**os.environ, **DISABLED_BACKENDS_CONFIG},
        )
        return proc.stdout.decode().strip()

    subprocess.run(
        [
            venv_bin_dir / "pip",
            "install",
            *_pip_args(extra_pip_args),
            f"pantsbuild.pants=={version}",
        ],
        check=True,
    )
    outputted_version = run_venv_pants(["--version"])
    if outputted_version != version:
        die(
            softwrap(f"""
                Installed version of Pants ({outputted_version}) did not match requested
                version ({version})!
            """)
        )
    run_venv_pants(["list", "src::"])
def main() -> None:
    """Entry point: verify prerequisites, select tests via --regex, run them all."""
    global JOBS
    setup_logging()
    logging.debug("args: %s", " ".join(sys.argv))

    # check that the binaries have been built first
    bins = [c.TRANSPILER]
    for b in bins:
        if not os.path.isfile(b):
            msg = b + " not found; run build_translator.py first?"
            die(msg, errno.ENOENT)

    ensure_dir(c.DEPS_DIR)

    args = parse_args()
    JOBS = '-j' + str(args.jobs)

    # filter what gets tested using `what` argument
    tests = [test_minimal, test_hello_world, test_json_c, test_ruby, test_lua]
    tests = [t for t in tests if args.regex.search(t.__name__)]
    if not tests:
        die("nothing to test")

    success = True
    for t in tests:
        logging.debug("running test: %s", t.__name__)
        # Bug fix: the original `success = success and t(args)` short-circuited,
        # silently skipping every remaining test once one had failed. Run each
        # test unconditionally and accumulate the overall result.
        if not t(args):
            success = False

    # FIXME: test lighttpd, varnish, Python, etc.
    # FIXME: add rebuild option?.
    if success:
        logging.info("PASS")
    else:
        logging.info("FAIL")
        quit(1)
def main() -> None:
    """Verify that pants.pex was built with wheels for the expected ABIs only."""
    # Bug fix: `Path.is_file` is a method. Without the call parentheses the
    # attribute is always truthy, so the missing-file error below was unreachable.
    if not Path("pants.pex").is_file():
        die("pants.pex not found! Ensure you are in the repository root, then run "
            "`./build-support/bin/ci.py --bootstrap` to bootstrap pants.pex with Python 3.6 or "
            "`./build-support/bin/ci.py --bootstrap --python-version 2.7` to bootstrap pants.pex with "
            "Python 2.7.")
    expected_abis = frozenset(create_parser().parse_args().abis)
    with zipfile.ZipFile("pants.pex", "r") as pex:
        with pex.open("PEX-INFO", "r") as pex_info:
            pex_info_content = pex_info.readline().decode()
    # Collect the ABI tag of every wheel baked into the PEX, skipping
    # pure-Python ("none") wheels.
    parsed_abis = frozenset(
        parse_abi_from_filename(filename)
        for filename in json.loads(pex_info_content)["distributions"].keys()
        if parse_abi_from_filename(filename) != "none")
    if not parsed_abis.issubset(expected_abis):
        die("pants.pex was built with the incorrect ABI. Expected wheels with: {}, found: {}."
            .format(" or ".join(sorted(expected_abis)), ", ".join(sorted(parsed_abis))))
    green(
        "Success. The pants.pex was built with wheels carrying the expected ABIs: {}."
        .format(", ".join(sorted(parsed_abis))))
def run_rust_tests() -> None:
    """Run the Rust test suite via cargo inside a travis section; die on failure."""
    command = [
        "./cargo",
        "test",
        "--all",
        # We pass --tests to skip doc tests because our generated protos contain invalid
        # doc tests in their comments.
        "--tests",
        "--",
        "--nocapture",
    ]
    if platform.system() == "Darwin":
        # The osx travis environment has a low file descriptors ulimit, so we avoid
        # running too many tests in parallel.
        command.append("--test-threads=1")
    with travis_section("RustTests", "Running Rust tests"):
        try:
            subprocess.run(command, env={**os.environ, "RUST_BACKTRACE": "all"}, check=True)
        except subprocess.CalledProcessError:
            die("Rust test failure.")
def main() -> None:
    """Run MyPy (via the pants lint goal) over the requested target globs."""
    globs = create_parser().parse_args().globs
    command = [
        "./pants",
        # We run MyPy against targets with either the tag `type_checked` or
        # `partially_type_checked`. `partially_type_checked` means that the target is
        # still missing type hints, but that we still want to run MyPy against it so that
        # we can enforce the type hints that may be there already and we can make sure
        # that we don't revert in adding code that MyPy flags as an error.
        "--tag=type_checked,partially_type_checked",
        "--backend-packages=pants.contrib.mypy",
        "--mypy-config=build-support/mypy/mypy.ini",
        "lint",
        *globs,
    ]
    try:
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError:
        die("Please fix the above errors and run again.")
def setup_pants_version(test_pants_version: PantsVersion):
    """Modify pants.ini to allow the pants version to be unspecified or keep what was
    originally there.

    Context-manager-style generator: rewrites pants.ini for the duration of the
    `with` body, then restores the original contents.
    """
    updated_config = read_config()
    config_entry = "pants_version"
    if test_pants_version == PantsVersion.unspecified:
        updated_config.remove_option(CONFIG_GLOBAL_SECTION, config_entry)
        # NB: We also remove plugins as they refer to the pants_version.
        updated_config.remove_option(CONFIG_GLOBAL_SECTION, "plugins")
        banner(f"Temporarily removing `{config_entry}` from pants.ini.")
    elif test_pants_version == PantsVersion.config:
        if config_entry not in updated_config[CONFIG_GLOBAL_SECTION]:
            die(f"You requested to use `{config_entry}` from pants.ini, but pants.ini does not include `{config_entry}`!")
        current_pants_version = updated_config[CONFIG_GLOBAL_SECTION][config_entry]
        banner(f"Using the `{config_entry}` set in pants.ini: `{current_pants_version}`.")
    with temporarily_rewrite_config(updated_config):
        yield
    banner(f"Restoring original `{config_entry}` value in pants.ini.")
def update_cmakelists():
    """
    Even though we build the ast-exporter out-of-tree, we still need it to be
    treated as if it was in a subdirectory of clang to pick up the required
    clang headers, etc.
    """
    filepath = os.path.join(c.LLVM_SRC, 'tools/clang/CMakeLists.txt')
    command = "add_clang_subdirectory(c2rust-ast-exporter)"
    if not os.path.isfile(filepath):
        die("not found: " + filepath, errno.ENOENT)

    # did we add the required command already?
    with open(filepath, "r") as handle:
        add_commands = command not in handle.read()

    logging.debug("add commands to %s: %s", filepath, add_commands)
    if add_commands:
        with open(filepath, "a+") as handle:
            handle.write(command)
        logging.debug("added commands to %s", filepath)
def test_ast_exporter(cc_db_path: str):
    """
    run ast-exporter on tinycbor if on linux. testing is
    not supported on macOS since bear requires system integrity
    protection to be disabled.
    """
    assert not on_mac(), "sanity testing requires linux host"

    exporter_path = os.path.join(c.LLVM_BIN, "ast-exporter")
    if not os.path.isfile(exporter_path):
        die("ast-exporter not found in " + c.LLVM_BIN)
    ast_extr = get_cmd_or_die(exporter_path)

    include_dirs = get_system_include_dirs()

    with open(cc_db_path, "r") as fh:
        cc_db = json.load(fh)
    for cmd in cc_db:
        exporter_ast_from(ast_extr, cc_db_path, include_dirs, **cmd)

    logging.info("PASS sanity testing")
def _main():
    """Build the c2rust transpiler: fetch LLVM, configure, build, report."""
    setup_logging()
    logging.debug("args: %s", " ".join(sys.argv))

    # FIXME: allow env/cli override of LLVM_SRC and LLVM_BLD
    # FIXME: check that cmake and ninja are installed
    # FIXME: option to build LLVM/Clang from master?

    # earlier plumbum versions are missing features such as TEE
    if pb.__version__ < c.MIN_PLUMBUM_VERSION:
        err = ("locally installed version {} of plumbum is too old.\n"
               "please upgrade plumbum to version {} or later."
               .format(pb.__version__, c.MIN_PLUMBUM_VERSION))
        die(err)

    args = _parse_args()

    # clang 3.6.0 is known to work; 3.4.0 known to not work.
    ensure_clang_version([3, 6, 0])

    if args.clean_all:
        logging.info("cleaning all dependencies and previous built files")
        for stale_dir in (c.LLVM_SRC, c.LLVM_BLD, c.BUILD_DIR, c.AST_EXPO_PRJ_DIR):
            shutil.rmtree(stale_dir, ignore_errors=True)
        cargo = get_cmd_or_die("cargo")
        with pb.local.cwd(c.ROOT_DIR):
            invoke(cargo, "clean")

    ensure_dir(c.LLVM_BLD)
    ensure_dir(c.BUILD_DIR)
    git_ignore_dir(c.BUILD_DIR)

    download_llvm_sources()
    update_cmakelists()
    configure_and_build_llvm(args)
    build_transpiler(args)
    print_success_msg(args)
def _parse_args():
    """
    define and parse command line arguments here.
    """
    desc = 'download dependencies for the AST exporter and built it.'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-c', '--clean-all', default=False,
                        action='store_true', dest='clean_all',
                        help='clean everything before building')
    parser.add_argument('--with-clang', default=False,
                        action='store_true', dest='with_clang',
                        help='build clang with this tool')
    llvm_ver_help = 'fetch and build specified version of clang/LLVM (default: {})'.format(c.LLVM_VER)
    # FIXME: build this list by globbing for scripts/llvm-*.0.*-key.asc
    llvm_ver_choices = ["6.0.0", "6.0.1", "7.0.0", "7.0.1", "8.0.0",
                        "9.0.0", "10.0.0", "10.0.1", "11.0.0", "11.1.0"]
    parser.add_argument('--with-llvm-version', default=None,
                        action='store', dest='llvm_ver',
                        help=llvm_ver_help, choices=llvm_ver_choices)
    parser.add_argument('--without-assertions', default=True,
                        action='store_false', dest='assertions',
                        help='build the tool and clang without assertions')
    parser.add_argument('-x', '--xcode', default=False,
                        action='store_true', dest='xcode',
                        help='generate Xcode project files (macOS only)')
    parser.add_argument('-v', '--verbose', default=False,
                        action='store_true', dest='verbose',
                        help='emit verbose information during build')
    parser.add_argument('--skip-signature-checks', default=False,
                        action='store_true', dest='llvm_skip_signature_checks',
                        help='skip signature check of source code archives')
    c.add_args(parser)

    args = parser.parse_args()

    # Xcode project generation only makes sense on a macOS host.
    if not on_mac() and args.xcode:
        die("-x/--xcode option requires macOS host.")

    c.update_args(args)
    return args
def build(packages: Iterable[Package], bdist_wheel_flags: Iterable[str]) -> None:
    """Build wheels for `packages` via the pants setup-py goal and copy any new
    wheels into the module-level `destination` directory."""
    args = (
        "./pants",
        # TODO(#9924).
        "--no-dynamic-ui",
        # TODO(#7654): It's not safe to use Pantsd because we're already using Pants to run
        # this script.
        "--concurrent",
        "setup-py",
        *(package.target for package in packages),
        "--",
        "bdist_wheel",
        *bdist_wheel_flags,
    )
    try:
        subprocess.run(args, check=True)
        for package in packages:
            found_wheels = sorted(Path("dist").glob(f"{package}-{version}-*.whl"))
            # NB: For any platform-specific wheels, like pantsbuild.pants, we assume that the
            # top-level `dist` will only have wheels built for the current platform. This
            # should be safe because it is not possible to build native wheels for another
            # platform.
            if not is_cross_platform(found_wheels) and len(found_wheels) > 1:
                raise ValueError(
                    f"Found multiple wheels for {package} in the `dist/` folder, but was "
                    f"expecting only one wheel: {sorted(wheel.name for wheel in found_wheels)}."
                )
            for wheel in found_wheels:
                if not (destination / wheel.name).exists():
                    # We use `copy2` to preserve metadata.
                    shutil.copy2(wheel, destination)
    except subprocess.CalledProcessError as e:
        failed_packages = ",".join(package.name for package in packages)
        failed_targets = " ".join(package.target for package in packages)
        die(
            f"Failed to build packages {failed_packages} for {version} with targets "
            f"{failed_targets}.\n\n{e!r}",
        )
def native_engine_so_in_s3_cache(*, aws_bucket: str, native_engine_so_aws_key: str) -> bool:
    """Return True iff exactly one version of the native engine .so exists in S3."""
    # --max-items 2 is enough to tell "unique" from "not unique".
    listing = subprocess.run(
        [
            *AWS_COMMAND_PREFIX,
            "s3api",
            "list-object-versions",
            "--bucket",
            aws_bucket,
            "--prefix",
            native_engine_so_aws_key,
            "--max-items",
            "2",
        ],
        stdout=subprocess.PIPE,
        check=True,
    ).stdout.decode()
    try:
        return _s3_listing_has_unique_version(native_engine_so_aws_key, listing)
    except NonUniqueVersionError as e:
        die(str(e))
def confirm(target_dir, rename_scheme):
    """Interactively confirm a rename; returns (proceed, edited_scheme_file).

    'y' proceeds, 'e' opens the scheme in an editor first, 'n'/'q' aborts;
    anything else re-prompts.
    """
    file_count = len(rename_scheme)
    what_to_rename = 'all {} files'.format(file_count) if file_count > 1 else 'the file'
    answer = input(
        '{}: sure you want to rename {} in {} [yne]?'.format(
            SCRIPT_NAME, what_to_rename, target_dir)).lower()
    if answer == 'y':
        return (True, None)
    if answer == 'e':
        temp_file = save_scheme(rename_scheme)
        if not temp_file:
            die('Can\'t create the temporary file.')
        if not _open_editor(temp_file):
            die('Can\'t start the shell text editor.')
        return (True, temp_file)
    if answer in ('n', 'q'):
        return (False, None)
    # Unrecognized input: ask again.
    return confirm(target_dir, rename_scheme)
def download_pex_bin() -> Iterator[Path]:
    """Download PEX and return the path to the binary."""
    try:
        # Find the pinned pex version in the requirements file.
        pex_version = next(
            strip_prefix(line, "pex==").rstrip()
            for line in Path("3rdparty/python/requirements.txt").read_text().splitlines()
            if line.startswith("pex=="))
    except (FileNotFoundError, StopIteration) as exc:
        die(
            softwrap(f"""
                Could not find a requirement starting with `pex==` in
                3rdparty/python/requirements.txt: {repr(exc)}
            """))
    with TemporaryDirectory() as tempdir:
        resp = requests.get(
            f"https://github.com/pantsbuild/pex/releases/download/v{pex_version}/pex"
        )
        resp.raise_for_status()
        pex_path = Path(tempdir, "pex")
        pex_path.write_bytes(resp.content)
        yield pex_path
def test_ruby(args: argparse.Namespace) -> bool:
    """Download and build ruby under bear, then transpile it; return success."""
    if on_mac():
        die("transpiling ruby on mac is not supported.")

    if not os.path.isfile(os.path.join(c.DEPS_DIR, RUBY_ARCHIVE)):
        with pb.local.cwd(c.DEPS_DIR):
            download_archive(RUBY_URL, RUBY_ARCHIVE)
            invoke_quietly(TAR, "xf", RUBY_ARCHIVE)

    compile_db = os.path.join(RUBY_SRC, c.CC_DB_JSON)

    # unconditionally compile ruby since we don't know if
    # cc_db was generated from the environment we're in.
    with pb.local.cwd(RUBY_SRC), pb.local.env(CC="clang", cflags="-w"):
        configure = pb.local.get("./configure")
        invoke(configure)
        invoke(BEAR[MAKE[JOBS]])

    if not os.path.isfile(compile_db):
        die("missing " + compile_db, errno.ENOENT)

    with open(compile_db) as cc_db:
        return transpile_files(cc_db, args.jobs, None, False, args.verbose)
def run_v2_tests(*, targets: Set[str], execution_strategy: str, oauth_token_path: Optional[str] = None) -> None:
    """Run the V2 pytest suite over `targets`, optionally via remote execution."""
    try:
        command = (["./pants.pex", "--no-v1", "--v2", "test.pytest"]
                   + sorted(targets) + PYTEST_PASSTHRU_ARGS)
        if oauth_token_path is not None:
            # Splice the remoting flags in just before the goal's targets.
            command[3:3] = [
                "--pants-config-files=pants.remote.ini",
                # We turn off speculation to reduce the risk of flakiness, where a test passes
                # locally but fails remoting and we have a race condition for which
                # environment executes first.
                "--process-execution-speculation-strategy=none",
                f"--remote-oauth-bearer-token-path={oauth_token_path}",
            ]
        subprocess.run(command, check=True)
    except subprocess.CalledProcessError:
        die(f"V2 unit tests failure ({execution_strategy} build execution).")
    else:
        green(f"V2 unit tests passed ({execution_strategy} build execution).")
def validate_package_access(self, pkg_name: str) -> None:
    """Die if PyPI owner/maintainer roles for `pkg_name` differ from expectations."""
    actual_owners = set()
    actual_maintainers = set()
    for role, username in self.client.package_roles(pkg_name):
        if role == "Owner":
            actual_owners.add(username)
        elif role == "Maintainer":
            actual_maintainers.add(username)
        else:
            raise ValueError(f"Unrecognized role {role} for user {username}")

    err_msg = ""
    err_msg += self.validate_role_sets("owner", actual_owners, _expected_owners)
    err_msg += self.validate_role_sets("maintainer", actual_maintainers, _expected_maintainers)
    if err_msg:
        die(f"Role discrepancies for {pkg_name}: {err_msg}")
    print(f"Roles for package {pkg_name} as expected.")
def _main():
    """Build the AST importer: fetch deps, build LLVM pieces, and sanity-test."""
    setup_logging()
    logging.debug("args: %s", " ".join(sys.argv))

    # FIXME: allow env/cli override of LLVM_SRC, LLVM_VER, and LLVM_BLD
    # FIXME: check that cmake and ninja are installed
    # FIXME: option to build LLVM/Clang from master?

    # earlier plumbum versions are missing features such as TEE
    if pb.__version__ < c.MIN_PLUMBUM_VERSION:
        err = ("locally installed version {} of plumbum is too old.\n"
               "please upgrade plumbum to version {} or later."
               .format(pb.__version__, c.MIN_PLUMBUM_VERSION))
        die(err)

    args = _parse_args()

    if args.clean_all:
        logging.info("cleaning all dependencies and previous built files")
        for stale_dir in (c.LLVM_SRC, c.LLVM_BLD, c.DEPS_DIR):
            shutil.rmtree(stale_dir, ignore_errors=True)

    # prerequisites
    if not have_rust_toolchain(c.CUSTOM_RUST_NAME):
        die("missing rust toolchain: " + c.CUSTOM_RUST_NAME, errno.ENOENT)

    # clang 3.6.0 is known to work; 3.4.0 known to not work.
    ensure_clang_version([3, 6, 0])
    ensure_rustc_version(c.CUSTOM_RUST_RUSTC_VERSION)

    ensure_dir(c.LLVM_BLD)
    ensure_dir(c.DEPS_DIR)
    git_ignore_dir(c.DEPS_DIR)

    # bear is only available (and needed) on linux hosts.
    if on_linux():
        build_a_bear()
        if not os.path.isfile(c.BEAR_BIN):
            die("bear not found", errno.ENOENT)

    download_llvm_sources()
    integrate_ast_exporter()

    cc_db = install_tinycbor()

    configure_and_build_llvm(args)

    # NOTE: we're not doing this anymore since it is
    # faster and takes less space to simply pull the
    # prebuilt nightly binaries with rustup
    # download_and_build_custom_rustc(args)

    build_ast_importer(args.debug)

    if not on_mac() and args.sanity_test:
        test_ast_exporter(cc_db)
def check_pants_wheels_present(check_dir: str | Path) -> None:
    """Die unless all expected pantsbuild.pants wheels exist under `check_dir`."""
    banner(f"Checking prebuilt wheels for {CONSTANTS.pants_unstable_version}")
    missing_packages = []
    for package in PACKAGES:
        local_files = package.find_locally(
            version=CONSTANTS.pants_unstable_version, search_dir=check_dir)
        if not local_files:
            missing_packages.append(package.name)
            continue
        # Cross-platform packages must ship one wheel per supported
        # (interpreter, platform) combination.
        if is_cross_platform(local_files) and len(local_files) != 7:
            formatted_local_files = "\n ".join(sorted(f.name for f in local_files))
            missing_packages.append(
                softwrap(f"""
                    {package.name}. Expected 7 wheels ({{cp37m, cp38, cp39}} x
                    {{macosx-x86_64, linux-x86_64}} + cp39-macosx), but found {len(local_files)}:\n
                    {formatted_local_files}
                """))
    if missing_packages:
        formatted_missing = "\n ".join(missing_packages)
        die(f"Failed to find prebuilt wheels:\n {formatted_missing}")
    green(f"All {len(PACKAGES)} pantsbuild.pants packages were fetched and are valid.")
def _parse_args():
    """
    define and parse command line arguments here.
    """
    desc = 'download dependencies for the AST exporter and built it.'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-c', '--clean-all', default=False,
                        action='store_true', dest='clean_all',
                        help='clean everything before building')
    parser.add_argument('--with-clang', default=False,
                        action='store_true', dest='with_clang',
                        help='build clang with this tool')
    parser.add_argument('--without-assertions', default=True,
                        action='store_false', dest='assertions',
                        help='build the tool and clang without assertions')
    parser.add_argument('-x', '--xcode', default=False,
                        action='store_true', dest='xcode',
                        help='generate Xcode project files (macOS only)')
    c.add_args(parser)

    args = parser.parse_args()

    # Xcode project generation only makes sense on a macOS host.
    if not on_mac() and args.xcode:
        die("-x/--xcode option requires macOS host.")

    c.update_args(args)
    return args
def update_cbor_prefix(makefile):
    """
    rewrite prefix variable in tinycbor makefile.
    """
    if not os.path.isfile(makefile):
        die("not found: " + makefile, errno.ENOENT)

    lines = []
    writeback = False
    with open(makefile, 'r') as fh:
        for line in fh.readlines():
            m = re.match(r'^\s*prefix\s*=\s*([^\s]+)', line)
            if m:
                logging.debug("tinycbor prefix: '%s'", m.group(1))
                # Only rewrite the file when the prefix actually differs.
                writeback = m.group(1) != c.CBOR_PREFIX
                lines.append("prefix = " + c.CBOR_PREFIX + os.linesep)
            else:
                lines.append(line)

    if writeback:
        logging.debug("updating tinycbor Makefile")
        with open(makefile, 'w') as fh:
            fh.writelines("".join(lines))
def publish_apple_silicon() -> None:
    """Build the Apple Silicon (arm64) pants wheel and upload it to PyPI."""
    banner("Building and publishing an Apple Silicon wheel")

    # Environment preconditions for this platform-specific build.
    if os.environ.get("USE_PY39") != "true":
        die("Must set `USE_PY39=true` when building for Apple Silicon.")
    if os.environ.get("MODE") == "debug":
        die("Must build Rust in release mode, not debug. Please run `unset MODE`.")

    check_clean_git_branch()
    check_pgp()
    check_roles()

    dest_dir = CONSTANTS.deploy_pants_wheel_dir / CONSTANTS.pants_stable_version
    if dest_dir.exists():
        shutil.rmtree(dest_dir)
    subprocess.run(
        [
            "./pants",
            "--concurrent",
            f"--pants-distdir={dest_dir}",
            "package",
            PANTS_PKG.target,
        ],
        check=True,
    )
    expected_whl = (
        dest_dir
        / f"pantsbuild.pants-{CONSTANTS.pants_stable_version}-cp39-cp39-macosx_11_0_arm64.whl"
    )
    if not expected_whl.exists():
        die(
            softwrap(f"""
                Failed to find {expected_whl}. Are you running from the correct platform and
                macOS version?
            """))

    create_twine_venv()
    subprocess.run([CONSTANTS.twine_venv_dir / "bin/twine", "check", expected_whl], check=True)
    upload_wheels_via_twine()
    banner("Successfully released Apple Silicon wheel to PyPI")
def _main():
    """Build the clang cross-check plugin (Linux-only)."""
    if on_mac():
        die("Cross-checking is only supported on Linux hosts.")

    setup_logging()
    logging.debug("args: %s", " ".join(sys.argv))

    # earlier plumbum versions are missing features such as TEE
    if pb.__version__ < c.MIN_PLUMBUM_VERSION:
        err = ("locally installed version {} of plumbum is too old.\n"
               "please upgrade plumbum to version {} or later."
               .format(pb.__version__, c.MIN_PLUMBUM_VERSION))
        die(err)

    args = _parse_args()

    if args.clean_all:
        logging.info("cleaning all dependencies and previous built files")
        shutil.rmtree(c.CLANG_XCHECK_PLUGIN_BLD, ignore_errors=True)
        make = get_cmd_or_die('make')
        with pb.local.cwd(c.LIBFAKECHECKS_DIR):
            make('clean')

    # prerequisites
    if not have_rust_toolchain(c.CUSTOM_RUST_NAME):
        die("missing rust toolchain: " + c.CUSTOM_RUST_NAME, errno.ENOENT)

    # clang 3.6.0 is known to work; 3.4.0 known to not work.
    ensure_clang_version([3, 6, 0])
    # NOTE: it seems safe to disable this check since we now
    # that we use a rust-toolchain file for rustc versioning.
    # ensure_rustc_version(c.CUSTOM_RUST_RUSTC_VERSION)

    ensure_dir(c.CLANG_XCHECK_PLUGIN_BLD)
    ensure_dir(c.BUILD_DIR)
    git_ignore_dir(c.BUILD_DIR)

    build_clang_plugin(args)
def debug(adbCmd, deqpCmdLine, targetGDBPort, hostGDBPort, jdbPort, jdbCmd, gdbCmd,
          buildDir, deviceLibs, breakpoints, serial, deviceGdbCmd, appProcessName,
          linkerName):
    """Launch the dEQP Android activity under gdb + jdb.

    Starts the intent in debug mode, attaches gdbserver on the device,
    forwards the gdb/jdb ports, pulls the binaries and libraries needed for
    symbolication, then runs jdb and gdb locally. All child processes and the
    on-device program are cleaned up in the `finally` block.
    """
    programPid = -1
    gdbServerProcess = None
    gdbProcess = None
    jdbProcess = None
    curDir = os.getcwd()
    debugDir = os.path.join(common.ANDROID_DIR, "debug")
    # Common adb invocation prefix, including the device serial when given.
    # (The original built this list inline at every call site and also kept an
    # unused `serialArg` string.)
    adbBase = [adbCmd] + (["-s", serial] if serial is not None else [])

    if os.path.exists(debugDir):
        shutil.rmtree(debugDir)

    os.makedirs(debugDir)
    os.chdir(debugDir)

    try:
        # Start execution
        print("Starting intent...")
        common.execArgs(adbBase + ["shell", "am", "start", "-W", "-D", "-n",
                                   "com.drawelements.deqp/android.app.NativeActivity",
                                   "-e", "cmdLine",
                                   "\"\"unused " + deqpCmdLine + "\"\""])
        print("Intent started")

        # Kill existing gdbservers
        print("Check and kill existing gdbserver")
        gdbPid = getADBProgramPID(adbCmd, "gdbserver", serial)
        if gdbPid != -1:
            print("Found gdbserver with PID %i" % gdbPid)
            common.execArgs(adbBase + ["shell", "run-as", "com.drawelements.deqp",
                                       "kill", "-9", str(gdbPid)])
            print("Killed gdbserver")
        else:
            print("Couldn't find existing gdbserver")

        programPid = getADBProgramPID(adbCmd, "com.drawelements.deqp:testercore", serial)
        print("Find process PID")
        if programPid == -1:
            common.die("Couldn't get PID of testercore")
        print("Process running with PID %i" % programPid)

        # Start gdbserver
        print("Start gdbserver for PID %i redirect stdout to gdbserver-stdout.txt" % programPid)
        gdbServerProcess = subprocess.Popen(
            adbBase + ["shell", "run-as", "com.drawelements.deqp", deviceGdbCmd,
                       "localhost:" + str(targetGDBPort), "--attach", str(programPid)],
            stdin=subprocess.PIPE,
            stdout=open("gdbserver-stdout.txt", "wb"),
            stderr=open("gdbserver-stderr.txt", "wb"))
        print("gdbserver started")

        time.sleep(1)

        gdbServerProcess.poll()
        if gdbServerProcess.returncode is not None:
            common.die(
                "gdbserver returned unexpectly with return code %i\nsee gdbserver-stdout.txt for more info"
                % gdbServerProcess.returncode)

        # Setup port forwarding
        print("Forwarding local port to gdbserver port")
        common.execArgs(adbBase + ["forward", "tcp:" + str(hostGDBPort),
                                   "tcp:" + str(targetGDBPort)])

        # Pull some data files for debugger
        print("Pull /system/bin/%s from device" % appProcessName)
        common.execArgs(adbBase + ["pull", "/system/bin/" + str(appProcessName)])

        print("Pull /system/bin/%s from device" % linkerName)
        common.execArgs(adbBase + ["pull", "/system/bin/" + str(linkerName)])

        for lib in deviceLibs:
            print("Pull library %s from device" % lib)
            try:
                common.execArgs(adbBase + ["pull", lib])
            except Exception as e:
                print("Failed to pull library '%s'. Error: %s" % (lib, str(e)))

        print("Copy %s from build dir" % common.NATIVE_LIB_NAME)
        shutil.copyfile(os.path.join(buildDir, common.NATIVE_LIB_NAME), common.NATIVE_LIB_NAME)

        # Forward local port for jdb
        print("Forward local port to jdb port")
        common.execArgs(adbBase + ["forward", "tcp:" + str(jdbPort),
                                   "jdwp:" + str(programPid)])

        # Connect JDB
        print("Start jdb process redirectd stdout to jdb-stdout.txt")
        jdbProcess = subprocess.Popen(
            [jdbCmd, "-connect",
             "com.sun.jdi.SocketAttach:hostname=localhost,port=" + str(jdbPort),
             "-sourcepath", "../package"],
            stdin=subprocess.PIPE,
            stdout=open("jdb-stdout.txt", "wb"),
            stderr=open("jdb-stderr.txt", "wb"))
        print("Started jdb process")

        # Write gdb.setup
        print("Write gdb.setup")
        # Bug fix: the file was opened in binary mode ("wb") but written with
        # str arguments, which raises TypeError on Python 3. Open in text mode.
        gdbSetup = open("gdb.setup", "w")
        gdbSetup.write("file %s\n" % appProcessName)
        gdbSetup.write("set solib-search-path .\n")
        gdbSetup.write("target remote :%i\n" % hostGDBPort)
        gdbSetup.write("set breakpoint pending on\n")

        for breakpoint in breakpoints:
            print("Set breakpoint at %s" % breakpoint)
            gdbSetup.write("break %s\n" % breakpoint)

        gdbSetup.write("set breakpoint pending off\n")
        gdbSetup.close()

        print("Start gdb")
        gdbProcess = subprocess.Popen(common.shellquote(gdbCmd) + " -x gdb.setup", shell=True)
        gdbProcess.wait()

        print("gdb returned with %i" % gdbProcess.returncode)
        gdbProcess = None

        print("Close jdb process with 'quit'")
        # Bug fix: stdin is a binary pipe; write bytes, not str (Python 3).
        jdbProcess.stdin.write(b"quit\n")
        jdbProcess.wait()
        print("JDB returned %s" % str(jdbProcess.returncode))
        jdbProcess = None

        print("Kill gdbserver process")
        gdbServerProcess.kill()
        gdbServerProcess = None
        print("Killed gdbserver process")

        print("Kill program %i" % programPid)
        common.execArgs(adbBase + ["shell", "run-as", "com.drawelements.deqp",
                                   "kill", "-9", str(programPid)])
        print("Killed program")
    finally:
        # Best-effort cleanup of anything still running.
        if jdbProcess and jdbProcess.returncode is None:
            print("Kill jdb")
            jdbProcess.kill()
        elif jdbProcess:
            print("JDB returned %i" % jdbProcess.returncode)

        if gdbProcess and gdbProcess.returncode is None:
            print("Kill gdb")
            gdbProcess.kill()
        elif gdbProcess:
            print("GDB returned %i" % gdbProcess.returncode)

        if gdbServerProcess and gdbServerProcess.returncode is None:
            print("Kill gdbserver")
            gdbServerProcess.kill()
        elif gdbServerProcess:
            print("GDB server returned %i" % gdbServerProcess.returncode)

        if programPid != -1:
            print("Kill program %i" % programPid)
            common.execArgs(adbBase + ["shell", "run-as", "com.drawelements.deqp",
                                       "kill", "-9", str(programPid)])
            print("Killed program")

        os.chdir(curDir)
def main() -> None:
    """Entry point for the regression / unit / feature test runner.

    Parses CLI arguments, checks that the transpiler binary has been built,
    runs every test directory matching the ``--only-directories`` /
    ``--only-files`` filters, prints a per-status summary, and exits with
    status 1 if any unexpected failure or unexpected success occurred.
    """
    desc = 'run regression / unit / feature tests.'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('directory', type=readable_directory)
    parser.add_argument('--only-files', dest='regex_files', type=regex,
                        default='.*', help="Regular expression to filter which tests to run")
    parser.add_argument('--only-directories', dest='regex_directories', type=regex,
                        default='.*', help="Regular expression to filter which tests to run")
    parser.add_argument(
        '--log', dest='logLevel',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
        default='CRITICAL', help="Set the logging level")
    parser.add_argument('--keep', dest='keep', action='append',
                        choices=intermediate_files + ['all'], default=[],
                        help="Which intermediate files to not clear")
    parser.add_argument(
        '--test-longdoubles', dest='test_longdoubles',
        default=False, action="store_true",
        help="Enables testing of long double translation which requires gcc headers",
    )
    c.add_args(parser)

    args = parser.parse_args()
    c.update_args(args)
    test_directories = get_testdirectories(args.directory, args.regex_files,
                                           args.keep, args.test_longdoubles)
    setup_logging(args.logLevel)

    logging.debug("args: %s", " ".join(sys.argv))

    # check that the binaries have been built first
    bins = [c.TRANSPILER]
    for b in bins:
        if not os.path.isfile(b):
            msg = b + " not found; run cargo build --release first?"
            die(msg, errno.ENOENT)

    # NOTE: it seems safe to disable this check since we now
    # that we use a rust-toolchain file for rustc versioning.
    # ensure_rustc_version(c.CUSTOM_RUST_RUSTC_VERSION)

    if not test_directories:
        die("nothing to test")

    # Accumulate test case stats
    test_results = {
        "unexpected failures": 0,
        "unexpected successes": 0,
        "expected failures": 0,
        "successes": 0
    }

    for test_directory in test_directories:
        if args.regex_directories.fullmatch(test_directory.name):
            # Test directories are run one after another. Only test
            # directories that match '--only-directories' and tests that
            # match '--only-files' are run. We make a best effort to clean
            # up files we left behind.
            #
            # FIX: the previous version also caught (KeyboardInterrupt,
            # SystemExit) to call cleanup() before re-raising, but the
            # `finally` clause already runs on those exceptions too, so
            # cleanup() was invoked twice. `finally` alone is sufficient.
            try:
                statuses = test_directory.run()
            finally:
                test_directory.cleanup()

            for status in statuses:
                test_results[status.value] += 1

    # Print out test case stats
    sys.stdout.write("\nTest summary:\n")
    for variant, count in test_results.items():
        sys.stdout.write("  {}: {}\n".format(variant, count))

    # If anything unexpected happened, exit with error code 1.
    # FIX: was `quit(1)` — the `quit` builtin is injected by the `site`
    # module and is absent under `python -S`; sys.exit is the reliable way
    # to terminate with a status code.
    unexpected = (test_results["unexpected failures"] +
                  test_results["unexpected successes"])
    if 0 < unexpected:
        sys.exit(1)
def debug(
        adbCmd,         # adb command prefix string, e.g. "adb -s SERIAL"
        deqpCmdLine,    # command line passed to the dEQP native activity
        targetGDBPort,  # gdbserver port on the device
        hostGDBPort,    # local port forwarded to the device gdbserver
        jdbPort,        # local port forwarded to the app's JDWP transport
        jdbCmd,         # jdb executable
        gdbCmd,         # gdb executable
        buildDir,       # build dir containing libtestercore.so
        deviceLibs,     # extra device libraries to pull for symbol resolution
        breakpoints):   # breakpoint specs written into gdb.setup
    """Attach gdb (native) and jdb (Java) to a running dEQP test process.

    Starts the dEQP activity in debug-wait mode, attaches gdbserver to it,
    sets up adb port forwards, pulls the binaries gdb needs into a scratch
    debug directory, writes gdb.setup with the requested breakpoints, and
    runs gdb interactively.  All child processes and the target program are
    torn down in the `finally` block.
    """
    programPid = -1
    gdbServerProcess = None
    gdbProcess = None
    jdbProcess = None

    # Work inside a freshly-created debug directory; restored in `finally`.
    curDir = os.getcwd()
    debugDir = os.path.join(common.ANDROID_DIR, "debug")
    if os.path.exists(debugDir):
        shutil.rmtree(debugDir)
    os.makedirs(debugDir)
    os.chdir(debugDir)

    try:
        # Start execution (-W waits for launch, -D waits for a debugger)
        print("Starting intent...")
        common.execute("%s shell am start -W -D -n com.drawelements.deqp/android.app.NativeActivity -e cmdLine \"unused %s\"" % (adbCmd, deqpCmdLine.replace("\"", "\\\"")))
        print("Intent started")

        # Kill existing gdbservers
        print("Check and kill existing gdbserver")
        gdbPid = getADBProgramPID("lib/gdbserver")
        if gdbPid != -1:
            print("Found gdbserver with PID %i" % gdbPid)
            common.execute("%s shell run-as com.drawelements.deqp kill -9 %i" % (adbCmd, gdbPid))
            print("Killed gdbserver")
        else:
            print("Couldn't find existing gdbserver")

        programPid = getADBProgramPID("com.drawelements.deqp:testercore")
        print("Find process PID")
        if programPid == -1:
            common.die("Couldn't get PID of testercore")
        print("Process running with PID %i" % programPid)

        # Start gdbserver
        print("Start gdbserver for PID %i redirect stdout to gdbserver-stdout.txt" % programPid)
        gdbServerProcess = subprocess.Popen("%s shell run-as com.drawelements.deqp lib/gdbserver localhost:%i --attach %i" % (adbCmd, targetGDBPort, programPid), shell=True, stdin=subprocess.PIPE, stdout=open("gdbserver-stdout.txt", "wb"), stderr=open("gdbserver-stderr.txt", "wb"))
        print("gdbserver started")

        # Give gdbserver a moment to fail fast if the attach didn't work.
        time.sleep(1)
        gdbServerProcess.poll()
        if gdbServerProcess.returncode is not None:
            common.die("gdbserver returned unexpectly with return code %i see gdbserver-stdout.txt for more info" % gdbServerProcess.returncode)

        # Setup port forwarding
        print("Forwarding local port to gdbserver port")
        common.execute("%s forward tcp:%i tcp:%i" % (adbCmd, hostGDBPort, targetGDBPort))

        # Pull some data files for debugger
        print("Pull /system/bin/app_process from device")
        common.execute("%s pull /system/bin/app_process" % adbCmd)

        print("Pull /system/bin/linker from device")
        common.execute("%s pull /system/bin/linker" % adbCmd)

        for lib in deviceLibs:
            print("Pull library %s from device" % lib)
            common.execute("%s pull %s" % (adbCmd, lib))

        print("Copy libtestercore.so from build dir")
        shutil.copyfile(os.path.join(buildDir, "libtestercore.so"), "libtestercore.so")

        # Forward local port for jdb
        print("Forward local port to jdb port")
        common.execute("%s forward tcp:%i jdwp:%i" % (adbCmd, jdbPort, programPid))

        # Connect JDB
        print("Start jdb process redirectd stdout to jdb-stdout.txt")
        jdbProcess = subprocess.Popen("%s -connect com.sun.jdi.SocketAttach:hostname=localhost,port=%i -sourcepath ../package" % (jdbCmd, jdbPort), shell=True, stdin=subprocess.PIPE, stdout=open("jdb-stdout.txt", "wb"), stderr=open("jdb-stderr.txt", "wb"))
        print("Started jdb process")

        # Write gdb.setup
        # NOTE(review): file is opened "wb" but written with str — fine on
        # Python 2 (this script's vintage); would TypeError on Python 3.
        print("Write gdb.setup")
        gdbSetup = open("gdb.setup", "wb")
        gdbSetup.write("file app_process\n")
        gdbSetup.write("set solib-search-path .\n")
        gdbSetup.write("target remote :%i\n" % hostGDBPort)
        gdbSetup.write("set breakpoint pending on\n")

        for breakpoint in breakpoints:
            print("Set breakpoint at %s" % breakpoint)
            gdbSetup.write("break %s\n" % breakpoint)

        gdbSetup.write("set breakpoint pending off\n")
        gdbSetup.close()

        print("Start gdb")
        gdbProcess = subprocess.Popen("%s -x gdb.setup" % common.shellquote(gdbCmd), shell=True)
        gdbProcess.wait()

        print("gdb returned with %i" % gdbProcess.returncode)
        gdbProcess = None

        print("Close jdb process with 'quit'")
        jdbProcess.stdin.write("quit\n")
        jdbProcess.wait()
        print("JDB returned %s" % str(jdbProcess.returncode))
        jdbProcess = None

        print("Kill gdbserver process")
        gdbServerProcess.kill()
        gdbServerProcess = None
        print("Killed gdbserver process")

        print("Kill program %i" % programPid)
        # FIX: the "kill" verb was missing from this shell command
        # ("... com.drawelements.deqp -9 %i"), so the program was never
        # actually killed here.
        common.execute("%s shell run-as com.drawelements.deqp kill -9 %i" % (adbCmd, programPid))
        print("Killed program")
    finally:
        # Tear down whatever is still alive; returncode is None while a
        # Popen child is still running.
        if jdbProcess and jdbProcess.returncode is None:
            print("Kill jdb")
            jdbProcess.kill()
        elif jdbProcess:
            print("JDB returned %i" % jdbProcess.returncode)

        if gdbProcess and gdbProcess.returncode is None:
            print("Kill gdb")
            gdbProcess.kill()
        elif gdbProcess:
            print("GDB returned %i" % gdbProcess.returncode)

        if gdbServerProcess and gdbServerProcess.returncode is None:
            print("Kill gdbserver")
            gdbServerProcess.kill()
        elif gdbServerProcess:
            print("GDB server returned %i" % gdbServerProcess.returncode)

        # FIX: guard against programPid == -1 — without it this ran
        # "kill -9 -1", which signals every process the app user may
        # signal. The sibling implementation of this function has the
        # same guard.
        if programPid != -1:
            print("Kill program %i" % programPid)
            common.execute("%s shell run-as com.drawelements.deqp kill -9 %i" % (adbCmd, programPid))
            print("Killed program")

        os.chdir(curDir)
def ensure_shellcheck_installed() -> None:
    """Abort with an installation hint unless `shellcheck` is on the PATH."""
    if shutil.which("shellcheck") is not None:
        return
    die("`shellcheck` not installed! You may download this through your operating system's "
        "package manager, such as brew, apt, or yum. See "
        "https://github.com/koalaman/shellcheck#installing.")
def build_clang_plugin(args: "argparse.Namespace") -> None:
    """
    run cmake as needed to generate ninja buildfiles. then run ninja.

    :param args: parsed command-line arguments; this function reads
        ``args.debug`` (debug vs. release build) and
        ``args.with_c2rust_clang`` (use our own LLVM/clang build tree).
    """
    # First build the Rust cross-check config-capi static library with cargo;
    # its output path is handed to cmake via -DXCHECK_CONFIG_LIB below.
    cargo = get_cmd_or_die("cargo")
    config_capi_src_dir = os.path.join(c.CROSS_CHECKS_DIR, "rust-checks", "config-capi")
    cargo_target_dir = os.path.join(c.CLANG_XCHECK_PLUGIN_BLD, "config-capi-target")
    config_lib_path = os.path.join(cargo_target_dir,
                                   "debug" if args.debug else "release",
                                   "libc2rust_xcheck_config_capi.a")
    with pb.local.cwd(config_capi_src_dir):
        cargo_args = ["build", "--package", "c2rust-xcheck-config-capi"]
        if not args.debug:
            cargo_args.append("--release")
        # Redirect cargo's output into the plugin build dir so the .a lands
        # at the path computed above.
        with pb.local.env(CARGO_TARGET_DIR=cargo_target_dir):
            invoke(cargo[cargo_args])

    ninja = get_cmd_or_die("ninja")
    # Possible values are Release, Debug, RelWithDebInfo and MinSizeRel
    build_type = "Debug" if args.debug else "RelWithDebInfo"
    ninja_build_file = os.path.join(c.CLANG_XCHECK_PLUGIN_BLD, "build.ninja")
    with pb.local.cwd(c.CLANG_XCHECK_PLUGIN_BLD):
        # Re-run cmake only when no build.ninja exists yet, or when the
        # previously configured build type differs from the requested one.
        if os.path.isfile(ninja_build_file):
            prev_build_type = get_ninja_build_type(ninja_build_file)
            run_cmake = prev_build_type != build_type
        else:
            run_cmake = True
        if run_cmake:
            cmake = get_cmd_or_die("cmake")
            max_link_jobs = est_parallel_link_jobs()
            cargs = ["-G", "Ninja", c.CLANG_XCHECK_PLUGIN_SRC,
                     "-DXCHECK_CONFIG_LIB={}".format(config_lib_path),
                     "-DCMAKE_BUILD_TYPE=" + build_type,
                     "-DBUILD_SHARED_LIBS=1",
                     "-DLLVM_PARALLEL_LINK_JOBS={}".format(max_link_jobs)]
            if args.with_c2rust_clang:
                # Point cmake at our own LLVM build tree; die early with a
                # specific message if any expected artifact is missing.
                llvm_cmake_dir = os.path.join(c.LLVM_BLD, "lib", "cmake", "llvm")
                if not os.path.exists(llvm_cmake_dir):
                    die("missing LLVM cmake files at: " + llvm_cmake_dir)
                clang_cmake_dir = os.path.join(c.LLVM_BLD, "lib", "cmake", "clang")
                if not os.path.exists(clang_cmake_dir):
                    die("missing clang cmake files at: " + clang_cmake_dir)
                llvm_lit = os.path.join(c.LLVM_BLD, "bin", "llvm-lit")
                if not os.path.exists(llvm_lit):
                    die("missing llvm-lit binary at: " + llvm_lit)
                cargs.extend(["-DLLVM_DIR={}".format(llvm_cmake_dir),
                              "-DClang_DIR={}".format(clang_cmake_dir),
                              "-DLLVM_EXTERNAL_LIT={}".format(llvm_lit)])
            else:
                # Some distros, e.g., Arch, Ubuntu, ship llvm-lit as /usr/bin/lit
                cargs.append("-DLLVM_EXTERNAL_LIT={}".format(pb.local['lit']))
            invoke(cmake[cargs])
        else:
            logging.debug("found existing ninja.build, not running cmake")
        # Always run ninja (inside the plugin build dir) after configuration.
        invoke(ninja)
def path_to_cc_db():
    """Return the compile-commands database path, dying if it is missing.

    On macOS the existence check is skipped and the path is returned as-is.
    """
    db_path = cc_cmd_db
    if not (os.path.isfile(db_path) or on_mac()):
        die("not found: " + db_path)
    return db_path
def check_pants_pex_exists() -> None:
    """Die unless a bootstrapped `pants.pex` exists in the current directory."""
    pex = Path("pants.pex")
    if pex.is_file():
        return
    die("pants.pex not found! Either run `./build-support/bin/ci.py --bootstrap` or check that "
        "AWS is properly downloading the uploaded `pants.pex`.")
if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-p', '--parallel', dest='doParallel', action="store_true", help="Install package in parallel.") parser.add_argument('-s', '--serial', dest='serial', type=str, nargs='+', help="Install package to device with serial number.") parser.add_argument('-a', '--all', dest='all', action="store_true", help="Install to all devices.") args = parser.parse_args() if args.all: installToAllDevices(args.doParallel) else: if args.serial == None: devices = common.getDevices(common.ADB_BIN) if len(devices) == 0: common.die('No devices connected') elif len(devices) == 1: installToDevice(devices[0]) else: print "More than one device connected:" for i in range(0, len(devices)): print "%3d: %16s %s" % ((i+1), devices[i].serial, devices[i].model) deviceNdx = int(raw_input("Choose device (1-%d): " % len(devices))) installToDevice(devices[deviceNdx-1]) else: devices = common.getDevices(common.ADB_BIN) devices = [dev for dev in devices if dev.serial in args.serial] devSerials = [dev.serial for dev in devices] notFounds = [serial for serial in args.serial if not serial in devSerials]