def dry_run_workloads(
    genny_repo_root: str, workspace_root: str, given_workload: Optional[str] = None
):
    """Dry-run one workload, or every workload YAML under src/workloads.

    :param genny_repo_root: path to the genny repository checkout.
    :param workspace_root: path to the workspace root.
    :param given_workload: when given, only this workload file is checked;
        otherwise all ``src/workloads/*/*.yml`` files are globbed.
    """
    info = toolchain_info(genny_repo_root=genny_repo_root, workspace_root=workspace_root)
    if given_workload is not None:
        workloads = [given_workload]
    else:
        glob_pattern = os.path.join(genny_repo_root, "src", "workloads", "*", "*.yml")
        workloads = glob.glob(glob_pattern)
    # enumerate() replaces the hand-maintained `curr` counter from the original.
    for curr, workload in enumerate(workloads):
        SLOG.info("Checking workload", workload=workload, index=curr, of_how_many=len(workloads))
        dry_run_workload(
            yaml_file_path=workload,
            is_darwin=info.is_darwin,
            genny_repo_root=genny_repo_root,
            workspace_root=workspace_root,
        )
def cmake_test(genny_repo_root: str, workspace_root: str):
    """Run the ctest suite, excluding server-dependent and benchmark tests."""
    tc_info = toolchain.toolchain_info(
        genny_repo_root=genny_repo_root, workspace_root=workspace_root
    )
    build_dir = os.path.join(genny_repo_root, "build")

    # ctest drops XUnit results here; make sure the directory exists first.
    os.makedirs(os.path.join(workspace_root, "build", "XUnitXML"), exist_ok=True)

    command = [
        "ctest",
        "--verbose",
        "--label-exclude",
        "(standalone|sharded|single_node_replset|three_node_replset|benchmark)",
    ]

    def run_ctest() -> bool:
        result: cmd_runner.RunCommandOutput = cmd_runner.run_command(
            cmd=command, cwd=build_dir, env=tc_info.toolchain_env, capture=False, check=True
        )
        return result.returncode == 0

    _run_command_with_sentinel_report(
        cmd_func=run_ctest, workspace_root=workspace_root, genny_repo_root=genny_repo_root
    )
def compile_all(
    genny_repo_root: str,
    workspace_root: str,
    build_system: str,
    os_family: str,
    linux_distro: str,
    ignore_toolchain_version: bool,
):
    """Build all targets by invoking the build system against the build/ directory."""
    tc = toolchain.toolchain_info(
        genny_repo_root=genny_repo_root,
        workspace_root=workspace_root,
        os_family=os_family,
        linux_distro=linux_distro,
        ignore_toolchain_version=ignore_toolchain_version,
    )
    # make needs an explicit parallel-jobs flag; other build systems (e.g. ninja)
    # parallelize on their own.
    parallel_flags = ["-j8"] if build_system == "make" else []
    run_command(
        cmd=[build_system, *parallel_flags, "-C", "build"],
        env=tc.toolchain_env,
        cwd=genny_repo_root,
        capture=False,
        check=True,
    )
def cmake(
    genny_repo_root: str,
    workspace_root: str,
    build_system: str,
    os_family: str,
    linux_distro: str,
    ignore_toolchain_version: bool,
    sanitizer: str,
    cmake_args: List[str],
):
    """Configure the build/ directory with cmake using the genny toolchain."""
    tc = toolchain.toolchain_info(
        workspace_root=workspace_root,
        genny_repo_root=genny_repo_root,
        os_family=os_family,
        linux_distro=linux_distro,
        ignore_toolchain_version=ignore_toolchain_version,
    )

    generator_for = {"make": "Unix Makefiles", "ninja": "Ninja"}

    # We set both the prefix path and the toolchain file here as a hack to allow cmake
    # to find both shared and static libraries. vcpkg doesn't natively support a project
    # using both.
    prefix_paths = [
        os.path.join(tc.toolchain_dir, f"installed/x64-{tc.triplet_os}-dynamic"),
        os.path.join(tc.toolchain_dir, f"installed/x64-{tc.triplet_os}"),
    ]
    vcpkg_toolchain_file = os.path.join(tc.toolchain_dir, "scripts/buildsystems/vcpkg.cmake")

    full_cmd = [
        "cmake",
        "-B",
        "build",
        "-G",
        generator_for[build_system],
        f"-DGENNY_WORKSPACE_ROOT={workspace_root}",
        # f"-DGENNY_REPO_ROOT={genny_repo_root}",  # Not needed (yet).
        "-DCMAKE_PREFIX_PATH={}".format(";".join(prefix_paths)),
        f"-DCMAKE_TOOLCHAIN_FILE={vcpkg_toolchain_file}",
        "-DCMAKE_EXPORT_COMPILE_COMMANDS=1",
        f"-DVCPKG_TARGET_TRIPLET=x64-{tc.triplet_os}",
        *_sanitizer_flags(sanitizer),
        *cmake_args,
    ]

    run_command(
        cwd=genny_repo_root,
        cmd=full_cmd,
        env=tc.toolchain_env,
        capture=False,
        check=True,
    )
def benchmark_test(genny_repo_root: str, workspace_root: str):
    """Run only the ctest targets labeled as benchmarks."""
    tc_info = toolchain.toolchain_info(
        genny_repo_root=genny_repo_root, workspace_root=workspace_root
    )
    build_dir = os.path.join(genny_repo_root, "build")
    benchmark_cmd = ["ctest", "--label-regex", "(benchmark)"]

    def run_benchmarks() -> bool:
        result: cmd_runner.RunCommandOutput = cmd_runner.run_command(
            cmd=benchmark_cmd, cwd=build_dir, env=tc_info.toolchain_env, capture=False, check=True
        )
        return result.returncode == 0

    _run_command_with_sentinel_report(
        cmd_func=run_benchmarks, workspace_root=workspace_root, genny_repo_root=genny_repo_root
    )
def cmake_test(
    genny_repo_root: str,
    workspace_root: str,
    regex: Optional[str] = None,
    repeat_until_fail: int = 1,
):
    """Run the ctest unit tests (randomized order, 4-way parallel, server-dependent
    and benchmark tests excluded).

    :param regex: when given, only run tests whose names match this ctest regex.
    :param repeat_until_fail: run each test this many times, stopping at the first failure.

    Exits the process with status 1 if the test run fails.
    (Original annotated ``regex`` as ``str`` despite the ``None`` default; fixed
    to ``Optional[str]``.)
    """
    info = toolchain.toolchain_info(genny_repo_root=genny_repo_root, workspace_root=workspace_root)
    workdir = os.path.join(genny_repo_root, "build")

    # ctest drops XUnit results here; create the directory up front.
    xunit_dir = os.path.join(workspace_root, "build", "XUnitXML")
    os.makedirs(xunit_dir, exist_ok=True)

    ctest_cmd = [
        "ctest",
        "--schedule-random",
        "--output-on-failure",
        "--parallel",
        "4",
        "--repeat-until-fail",
        str(repeat_until_fail),
        "--label-exclude",
        "(standalone|sharded|single_node_replset|three_node_replset|benchmark)",
    ]
    if regex is not None:
        ctest_cmd += ["--tests-regex", regex]

    def cmd_func() -> bool:
        output: cmd_runner.RunCommandOutput = cmd_runner.run_command(
            cmd=ctest_cmd, cwd=workdir, env=info.toolchain_env, capture=False, check=True
        )
        return output.returncode == 0

    try:
        _run_command_with_sentinel_report(
            cmd_func=cmd_func, workspace_root=workspace_root, genny_repo_root=genny_repo_root
        )
    except subprocess.CalledProcessError:
        # Surface the test failure as a non-zero process exit code.
        sys.exit(1)
def install(
    genny_repo_root: str,
    workspace_root: str,
    build_system: str,
    os_family: str,
    linux_distro: str,
    ignore_toolchain_version: bool,
):
    """Invoke the build system's ``install`` target against the build/ directory."""
    tc = toolchain.toolchain_info(
        genny_repo_root=genny_repo_root,
        workspace_root=workspace_root,
        os_family=os_family,
        linux_distro=linux_distro,
        ignore_toolchain_version=ignore_toolchain_version,
    )
    run_command(
        cmd=[build_system, "-C", "build", "install"],
        env=tc.toolchain_env,
        cwd=genny_repo_root,
        capture=False,
        check=True,
    )
def _setup_resmoke(
    workspace_root: str,
    genny_repo_root: str,
    mongo_dir: Optional[str],
    mongodb_archive_url: Optional[str],
):
    """Prepare everything resmoke needs: a mongo repo checkout, a mongod binary,
    and a virtualenv with resmoke's requirements installed.

    :param mongo_dir: explicit path to a mongo checkout; when None, fall back to
        ./src/mongo in the workspace, else a fresh clone under build/resmoke-mongo.
    :param mongodb_archive_url: URL of a mongodb tarball to download when no
        pre-built mongod is found; when None a canned artifact is chosen by distro.
    :return: tuple ``(resmoke_python, mongo_repo_path, bin_dir)`` — the venv's
        python interpreter, the mongo checkout path, and the dir containing mongod.
    :raises Exception: when no pre-built artifact exists for the current distro.

    Fixes vs. original: sentinel file handle was leaked by a bare ``open()``;
    the no-artifact error message ran its options together with no separators;
    log-message typos ("pre-build monogod"); local ``install`` shadowed the
    module-level ``install()`` function.
    """
    if mongo_dir is not None:
        mongo_repo_path = mongo_dir
    else:
        evergreen_mongo_repo = os.path.join(workspace_root, "src", "mongo")
        if os.path.exists(evergreen_mongo_repo):
            mongo_repo_path = evergreen_mongo_repo
        else:
            mongo_repo_path = os.path.join(genny_repo_root, "build", "resmoke-mongo")

    xunit_xml_path = os.path.join(workspace_root, "build", "XUnitXML")
    os.makedirs(xunit_xml_path, exist_ok=True)
    SLOG.info("Created xunit result dir", path=xunit_xml_path)

    resmoke_venv: str = os.path.join(mongo_repo_path, "resmoke_venv")
    resmoke_python: str = os.path.join(resmoke_venv, "bin", "python3")

    # Clone repo unless exists
    if not os.path.exists(mongo_repo_path):
        SLOG.info("Mongo repo doesn't exist. Checking it out.", mongo_repo_path=mongo_repo_path)
        cmd_runner.run_command(
            cmd=["git", "clone", "[email protected]:mongodb/mongo.git", mongo_repo_path],
            cwd=workspace_root,
            check=True,
            capture=False,
        )
        cmd_runner.run_command(
            # If changing this sha, you may need to use later binaries
            # in the _canned_artifacts dict.
            cmd=["git", "checkout", "298d4d6bbb9980b74bded06241067fe6771bef68"],
            cwd=mongo_repo_path,
            check=True,
            capture=False,
        )
    else:
        SLOG.info("Using existing mongo repo checkout", mongo_repo_path=mongo_repo_path)
        cmd_runner.run_command(
            cmd=["git", "rev-parse", "HEAD"],
            check=False,
            cwd=mongo_repo_path,
            capture=False,
        )

    # Look for mongod in:
    #   build/opt/mongo/db/mongod  (local build)
    #   build/install/bin/mongod   (local install)
    #   bin/mongod                 (extracted release tarball)
    opt = os.path.join(mongo_repo_path, "build", "opt", "mongo", "db", "mongod")
    # Renamed from `install` so it no longer shadows the module-level install().
    installed = os.path.join(mongo_repo_path, "build", "install", "bin", "mongod")
    from_tarball = os.path.join(mongo_repo_path, "bin", "mongod")
    if os.path.exists(opt):
        mongod = opt
    elif os.path.exists(installed):
        mongod = installed
    elif os.path.exists(from_tarball):
        mongod = from_tarball
    else:
        mongod = None

    if mongod is not None and mongodb_archive_url is not None:
        SLOG.info(
            "Found existing mongod so will not download artifacts.",
            existing_mongod=mongod,
            wont_download_artifacts_from=mongodb_archive_url,
        )

    if mongod is None:
        SLOG.info(
            "Couldn't find pre-built mongod. Fetching and installing.",
            looked_at=(opt, installed, from_tarball),
            fetching=mongodb_archive_url,
        )
        if mongodb_archive_url is None:
            info = toolchain.toolchain_info(
                genny_repo_root=genny_repo_root, workspace_root=workspace_root
            )
            if info.is_darwin:
                artifact_key = "osx"
            elif info.linux_distro == "amazon2":
                artifact_key = "amazon2"
            else:
                # Newline separators added: the original concatenated these
                # fragments into one run-together, unreadable line.
                raise Exception(
                    f"No pre-built artifacts for distro {info.linux_distro}. You can either:\n"
                    "1. compile/install a local mongo checkout in ./src/mongo.\n"
                    "2. Modify the _canned_artifacts dict in the genny python to include an artifact from a waterfall build.\n"
                    "3. Pass in the --mongodb-archive-url parameter to force a canned artifact."
                )
            mongodb_archive_url = _canned_artifacts[artifact_key]

        cmd_runner.run_command(
            cmd=["curl", "-LSs", mongodb_archive_url, "-o", "mongodb.tgz"],
            cwd=mongo_repo_path,
            capture=False,
            check=True,
        )
        cmd_runner.run_command(
            cmd=["tar", "--strip-components=1", "-zxf", "mongodb.tgz"],
            cwd=mongo_repo_path,
            capture=False,
            check=True,
        )
        mongod = from_tarball
    bin_dir = os.path.dirname(mongod)

    # Setup resmoke venv unless exists
    resmoke_setup_sentinel = os.path.join(resmoke_venv, "setup-done")
    if not os.path.exists(resmoke_setup_sentinel):
        SLOG.info("Resmoke venv doesn't exist. Creating.", resmoke_venv=resmoke_venv)
        shutil.rmtree(resmoke_venv, ignore_errors=True)
        import venv

        venv.create(env_dir=resmoke_venv, with_pip=True, symlinks=True)
        reqs_file = os.path.join(mongo_repo_path, "etc", "pip", "evgtest-requirements.txt")
        cmd = [resmoke_python, "-mpip", "install", "-r", reqs_file]
        cmd_runner.run_command(cmd=cmd, cwd=workspace_root, capture=False, check=True)
        # Original used a bare open() that leaked the file handle; create the
        # empty sentinel file and close it deterministically.
        with open(resmoke_setup_sentinel, "w"):
            pass
    return resmoke_python, mongo_repo_path, bin_dir