Example #1
def _set_logging_config():
    """Read the YAML configuration from 'pathname' describing how to log tests and fixtures."""
    pathname = _config.LOGGER_FILE
    try:
        # If the user provides a full valid path to a logging config
        # we don't need to search LOGGER_DIR for the file.
        if os.path.exists(pathname):
            logger_config = utils.load_yaml_file(pathname)
            _config.LOGGING_CONFIG = logger_config.pop("logging")
            _config.SHORTEN_LOGGER_NAME_CONFIG = logger_config.pop(
                "shorten_logger_name")
            return

        root = os.path.abspath(_config.LOGGER_DIR)
        files = os.listdir(root)
        for filename in files:
            (short_name, ext) = os.path.splitext(filename)
            if ext in (".yml", ".yaml") and short_name == pathname:
                config_file = os.path.join(root, filename)
                if not os.path.isfile(config_file):
                    raise ValueError(
                        "Expected a logger YAML config, but got '%s'" %
                        pathname)
                logger_config = utils.load_yaml_file(config_file)
                _config.LOGGING_CONFIG = logger_config.pop("logging")
                _config.SHORTEN_LOGGER_NAME_CONFIG = logger_config.pop(
                    "shorten_logger_name")
                return

        raise ValueError("Unknown logger '%s'" % pathname)
    except FileNotFoundError:
        raise IOError("Directory {} does not exist.".format(
            _config.LOGGER_DIR))
Example #2
    def get_all_yamls(cls, target_dir):
        """Get all YAML files in the given directory."""
        return {
            short_name: load_yaml_file(path)
            for short_name, path in cls.__get_suite_files_in_dir(
                os.path.abspath(target_dir)).items()
        }
Example #3
def _update_symbolizer_secrets():
    """Open `expansions.yml`, get the symbolizer secret values, and update them inside config.py."""
    if not _config.EVERGREEN_TASK_ID:
        # not running on Evergreen
        return
    yml_data = utils.load_yaml_file(_config.EXPANSIONS_FILE)
    _config.SYMBOLIZER_CLIENT_SECRET = yml_data.get("symbolizer_client_secret")
    _config.SYMBOLIZER_CLIENT_ID = yml_data.get("symbolizer_client_id")
Example #4
    def get_config_obj(pathname):
        """Get the suite config object in the given file."""
        # Named executors or suites are specified as the basename of the file, without the .yml
        # extension.
        if not fs.is_yaml_file(pathname) and not os.path.dirname(pathname):
            if pathname not in _config.NAMED_SUITES:  # pylint: disable=unsupported-membership-test
                return None
            # Expand 'pathname' to full path.
            pathname = _config.NAMED_SUITES[pathname]  # pylint: disable=unsubscriptable-object

        if not fs.is_yaml_file(pathname) or not os.path.isfile(pathname):
            raise optparse.OptionValueError("Expected a suite YAML config, but got '%s'" % pathname)
        return utils.load_yaml_file(pathname)
Example #5
def _get_yaml_config(kind, pathname):
    # Named executors or suites are specified as the basename of the file, without the .yml
    # extension.
    if not utils.is_yaml_file(pathname) and not os.path.dirname(pathname):
        if pathname not in _config.NAMED_SUITES:  # pylint: disable=unsupported-membership-test
            raise errors.SuiteNotFound("Unknown %s '%s'" % (kind, pathname))
        # Expand 'pathname' to full path.
        pathname = _config.NAMED_SUITES[pathname]  # pylint: disable=unsubscriptable-object

    if not utils.is_yaml_file(pathname) or not os.path.isfile(pathname):
        raise optparse.OptionValueError(
            "Expected a %s YAML config, but got '%s'" % (kind, pathname))
    return utils.load_yaml_file(pathname)
Example #6
def compare_start_time(cur_time_secs):
    """
    Return the difference between the current unix time in seconds and the start time in seconds.

    :param cur_time_secs: current unix time in seconds; can be obtained from time.time()
    :return: difference in seconds.
    """
    try:
        cur_timefile = utils.load_yaml_file(_START_TIME_FILE)
        start_time_secs = cur_timefile["start_time"]
    except (FileNotFoundError, KeyError) as err:
        raise FileNotFoundError("resmoke.py did not successfully record its start time") from err

    return cur_time_secs - start_time_secs
Example #7
    def get_config_obj(cls, suite_name):
        """Get the suite config object in the given file."""
        # Named executors or suites are specified as the basename of the file, without the .yml
        # extension.
        if not fs.is_yaml_file(suite_name) and not os.path.dirname(suite_name):
            named_suites = cls.get_named_suites()
            if suite_name not in named_suites:  # pylint: disable=unsupported-membership-test
                return None
            suite_name = named_suites[suite_name]  # pylint: disable=unsubscriptable-object

        if not fs.is_yaml_file(suite_name) or not os.path.isfile(suite_name):
            raise ValueError("Expected a suite YAML config, but got '%s'" %
                             suite_name)
        return utils.load_yaml_file(suite_name)
Example #8
    def get_all_yamls(target_dir):
        """Get all YAML files in the given directory."""
        all_files = {}
        root = os.path.abspath(target_dir)
        files = os.listdir(root)

        for filename in files:
            (short_name, ext) = os.path.splitext(filename)
            if ext in (".yml", ".yaml"):
                pathname = os.path.join(root, filename)

                if not fs.is_yaml_file(pathname) or not os.path.isfile(pathname):
                    raise optparse.OptionValueError(
                        "Expected a suite YAML config, but got '%s'" % pathname)
                all_files[short_name] = load_yaml_file(pathname)
        return all_files
Example #9
    def get_config_obj(cls, suite_name):
        """Get the suite config object in the given file."""
        if suite_name in cls.get_named_suites():
            # Check if it is a named suite first for efficiency.
            suite_path = cls.get_named_suites()[suite_name]
        elif fs.is_yaml_file(suite_name):
            # Check if it is a path to a YAML file.
            if os.path.isfile(suite_name):
                suite_path = suite_name
            else:
                raise ValueError("Expected a suite YAML config, but got '%s'" %
                                 suite_name)
        else:
            # Not an explicit suite, return None.
            return None

        return utils.load_yaml_file(suite_path)
Example #10
    def __init__(self, filename=None, yaml_string=None, raw_dict=None):
        """Init from a yaml file, from a yaml string, or default-construct."""

        super(HistoryDict, self).__init__()

        if filename is not None and yaml_string is not None:
            raise ValueError(
                "Cannot construct HistoryDict from both a yaml string and a file.")

        self._history_store = defaultdict(list)
        self._value_store = dict()
        self._global_time = 0

        raw_dict = default_if_none(raw_dict, {})
        if filename is not None:
            raw_dict = load_yaml_file(filename)
        elif yaml_string is not None:
            raw_dict = load_yaml(yaml_string)
        else:
            return  # Just default-construct.

        schema_version = raw_dict["SchemaVersion"]
        if schema_version != SCHEMA_VERSION:
            raise ValueError(
                f"Invalid schema version. Expected {SCHEMA_VERSION} but found {schema_version}."
            )
        history_dict = raw_dict["History"]
        for key in history_dict:
            for raw_access in history_dict[key]:
                access = Access.from_dict(raw_access)
                self._history_store[key].append(access)
                self._global_time = max(access.time, self._global_time)
            last_val = self._retrieve_last_value(key)
            if last_val is not TOMBSTONE:
                self._value_store[key] = last_val

        # The next recorded global time should be 1 higher than the last.
        self._global_time += 1
Example #11
def _update_config_vars(values):  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    """Update the variables of the config module."""

    config = _config.DEFAULTS.copy()

    # Override `config` with values from command line arguments.
    cmdline_vars = vars(values)
    for cmdline_key in cmdline_vars:
        if cmdline_key not in _config.DEFAULTS:
            # Ignore options that don't map to values in config.py
            continue
        if cmdline_vars[cmdline_key] is not None:
            config[cmdline_key] = cmdline_vars[cmdline_key]

    if os.path.isfile("resmoke.ini"):
        config_parser = configparser.ConfigParser()
        config_parser.read("resmoke.ini")
        if "resmoke" in config_parser.sections():
            user_config = dict(config_parser["resmoke"])
            config.update(user_config)

    def setup_feature_flags():
        _config.RUN_ALL_FEATURE_FLAG_TESTS = config.pop(
            "run_all_feature_flag_tests")
        all_feature_flags = []
        enabled_feature_flags = []
        try:
            all_feature_flags = open(ALL_FEATURE_FLAG_FILE).read().split()
        except FileNotFoundError:
            # If we ask resmoke to run with all feature flags, the feature flags file
            # needs to exist.
            if _config.RUN_ALL_FEATURE_FLAG_TESTS:
                raise

        if _config.RUN_ALL_FEATURE_FLAG_TESTS:
            enabled_feature_flags = all_feature_flags[:]

        # Specify additional feature flags from the command line.
        # Set running all feature flag tests to True if this option is specified.
        additional_feature_flags = config.pop("additional_feature_flags")
        if additional_feature_flags is not None:
            enabled_feature_flags.extend(additional_feature_flags)

        return enabled_feature_flags, all_feature_flags

    _config.ENABLED_FEATURE_FLAGS, all_feature_flags = setup_feature_flags()
    not_enabled_feature_flags = list(
        set(all_feature_flags) - set(_config.ENABLED_FEATURE_FLAGS))

    _config.ALWAYS_USE_LOG_FILES = config.pop("always_use_log_files")
    _config.BASE_PORT = int(config.pop("base_port"))
    _config.BACKUP_ON_RESTART_DIR = config.pop("backup_on_restart_dir")
    _config.BUILDLOGGER_URL = config.pop("buildlogger_url")
    _config.DBPATH_PREFIX = _expand_user(config.pop("dbpath_prefix"))
    _config.DRY_RUN = config.pop("dry_run")

    # EXCLUDE_WITH_ANY_TAGS will always contain the implicitly defined EXCLUDED_TAG.
    _config.EXCLUDE_WITH_ANY_TAGS = [_config.EXCLUDED_TAG]
    _config.EXCLUDE_WITH_ANY_TAGS.extend(
        utils.default_if_none(
            _tags_from_list(config.pop("exclude_with_any_tags")), []))

    # Don't run tests with feature flags that are not enabled.
    _config.EXCLUDE_WITH_ANY_TAGS.extend(not_enabled_feature_flags)

    _config.FAIL_FAST = not config.pop("continue_on_failure")
    _config.FLOW_CONTROL = config.pop("flow_control")
    _config.FLOW_CONTROL_TICKETS = config.pop("flow_control_tickets")

    _config.INCLUDE_WITH_ANY_TAGS = _tags_from_list(
        config.pop("include_with_any_tags"))

    _config.GENNY_EXECUTABLE = _expand_user(config.pop("genny_executable"))
    _config.JOBS = config.pop("jobs")
    _config.LINEAR_CHAIN = config.pop("linear_chain") == "on"
    _config.MAJORITY_READ_CONCERN = config.pop("majority_read_concern") == "on"
    _config.MIXED_BIN_VERSIONS = config.pop("mixed_bin_versions")
    if _config.MIXED_BIN_VERSIONS is not None:
        _config.MIXED_BIN_VERSIONS = _config.MIXED_BIN_VERSIONS.split("-")

    _config.INSTALL_DIR = config.pop("install_dir")
    if _config.INSTALL_DIR is not None:
        # Normalize the path so that on Windows dist-test/bin
        # translates to .\dist-test\bin then absolutify it since the
        # Windows PATH variable requires absolute paths.
        _config.INSTALL_DIR = os.path.abspath(
            _expand_user(os.path.normpath(_config.INSTALL_DIR)))

        for binary in ["mongo", "mongod", "mongos", "dbtest"]:
            keyname = binary + "_executable"
            if config.get(keyname, None) is None:
                config[keyname] = os.path.join(_config.INSTALL_DIR, binary)

    _config.DBTEST_EXECUTABLE = _expand_user(config.pop("dbtest_executable"))
    _config.MONGO_EXECUTABLE = _expand_user(config.pop("mongo_executable"))

    def _merge_set_params(param_list):
        ret = {}
        for set_param in param_list:
            ret.update(utils.load_yaml(set_param))
        return utils.dump_yaml(ret)

    _config.MONGOD_EXECUTABLE = _expand_user(config.pop("mongod_executable"))

    mongod_set_parameters = config.pop("mongod_set_parameters")
    if _config.ENABLED_FEATURE_FLAGS:
        feature_flag_dict = {
            ff: "true"
            for ff in _config.ENABLED_FEATURE_FLAGS
        }
        mongod_set_parameters.append(str(feature_flag_dict))

    _config.MONGOD_SET_PARAMETERS = _merge_set_params(mongod_set_parameters)
    _config.FUZZ_MONGOD_CONFIGS = config.pop("fuzz_mongod_configs")
    _config.CONFIG_FUZZ_SEED = config.pop("config_fuzz_seed")

    if _config.FUZZ_MONGOD_CONFIGS:
        if not _config.CONFIG_FUZZ_SEED:
            _config.CONFIG_FUZZ_SEED = random.randrange(sys.maxsize)
        else:
            _config.CONFIG_FUZZ_SEED = int(_config.CONFIG_FUZZ_SEED)
        _config.MONGOD_SET_PARAMETERS, _config.WT_ENGINE_CONFIG = mongod_fuzzer_configs \
            .fuzz_set_parameters(_config.CONFIG_FUZZ_SEED, _config.MONGOD_SET_PARAMETERS)

    _config.MONGOS_EXECUTABLE = _expand_user(config.pop("mongos_executable"))

    mongos_set_parameters = config.pop("mongos_set_parameters")
    if _config.ENABLED_FEATURE_FLAGS:
        feature_flag_dict = {
            ff: "true"
            for ff in _config.ENABLED_FEATURE_FLAGS
        }
        mongos_set_parameters.append(str(feature_flag_dict))

    _config.MONGOS_SET_PARAMETERS = _merge_set_params(mongos_set_parameters)

    _config.MONGOCRYPTD_SET_PARAMETERS = _merge_set_params(
        config.pop("mongocryptd_set_parameters"))

    _config.MRLOG = config.pop("mrlog")
    _config.NO_JOURNAL = config.pop("no_journal")
    _config.NUM_CLIENTS_PER_FIXTURE = config.pop("num_clients_per_fixture")
    _config.NUM_REPLSET_NODES = config.pop("num_replset_nodes")
    _config.NUM_SHARDS = config.pop("num_shards")
    _config.PERF_REPORT_FILE = config.pop("perf_report_file")
    _config.RANDOM_SEED = config.pop("seed")
    _config.REPEAT_SUITES = config.pop("repeat_suites")
    _config.REPEAT_TESTS = config.pop("repeat_tests")
    _config.REPEAT_TESTS_MAX = config.pop("repeat_tests_max")
    _config.REPEAT_TESTS_MIN = config.pop("repeat_tests_min")
    _config.REPEAT_TESTS_SECS = config.pop("repeat_tests_secs")
    _config.REPORT_FAILURE_STATUS = config.pop("report_failure_status")
    _config.REPORT_FILE = config.pop("report_file")
    _config.SERVICE_EXECUTOR = config.pop("service_executor")
    _config.SHELL_READ_MODE = config.pop("shell_read_mode")
    _config.SHELL_WRITE_MODE = config.pop("shell_write_mode")
    _config.SPAWN_USING = config.pop("spawn_using")
    _config.EXPORT_MONGOD_CONFIG = config.pop("export_mongod_config")
    _config.STAGGER_JOBS = config.pop("stagger_jobs") == "on"
    _config.STORAGE_ENGINE = config.pop("storage_engine")
    _config.STORAGE_ENGINE_CACHE_SIZE = config.pop(
        "storage_engine_cache_size_gb")
    _config.SUITE_FILES = config.pop("suite_files")
    if _config.SUITE_FILES is not None:
        _config.SUITE_FILES = _config.SUITE_FILES.split(",")
    _config.TAG_FILE = config.pop("tag_file")
    _config.TRANSPORT_LAYER = config.pop("transport_layer")
    _config.USER_FRIENDLY_OUTPUT = config.pop("user_friendly_output")

    # Internal testing options.
    _config.INTERNAL_PARAMS = config.pop("internal_params")

    # Evergreen options.
    _config.EVERGREEN_URL = config.pop("evergreen_url")
    _config.EVERGREEN_BUILD_ID = config.pop("build_id")
    _config.EVERGREEN_DISTRO_ID = config.pop("distro_id")
    _config.EVERGREEN_EXECUTION = config.pop("execution_number")
    _config.EVERGREEN_PATCH_BUILD = config.pop("patch_build")
    _config.EVERGREEN_PROJECT_NAME = config.pop("project_name")
    _config.EVERGREEN_REVISION = config.pop("git_revision")
    _config.EVERGREEN_REVISION_ORDER_ID = config.pop("revision_order_id")
    _config.EVERGREEN_TASK_ID = config.pop("task_id")
    _config.EVERGREEN_TASK_NAME = config.pop("task_name")
    _config.EVERGREEN_TASK_DOC = config.pop("task_doc")
    _config.EVERGREEN_VARIANT_NAME = config.pop("variant_name")
    _config.EVERGREEN_VERSION_ID = config.pop("version_id")

    # Cedar options.
    _config.CEDAR_URL = config.pop("cedar_url")
    _config.CEDAR_RPC_PORT = config.pop("cedar_rpc_port")

    def calculate_debug_symbol_url():
        url = "https://mciuploads.s3.amazonaws.com/"
        project_name = _config.EVERGREEN_PROJECT_NAME
        variant_name = _config.EVERGREEN_VARIANT_NAME
        revision = _config.EVERGREEN_REVISION
        task_id = _config.EVERGREEN_TASK_ID
        if variant_name is not None and revision is not None and task_id is not None:
            url = "/".join([
                project_name, variant_name, revision, task_id,
                f"/debugsymbols/debugsymbols-{task_id}"
            ])
            url += ".tgz" if sys.platform == "win32" else ".zip"
            return url
        return None

    if _config.DEBUG_SYMBOL_PATCH_URL is None:
        _config.DEBUG_SYMBOL_PATCH_URL = calculate_debug_symbol_url()

    # Archival options. Archival is enabled only when running on evergreen.
    if not _config.EVERGREEN_TASK_ID:
        _config.ARCHIVE_FILE = None
    else:
        # Enable archival globally for all required mainline builders.
        if (_config.EVERGREEN_VARIANT_NAME is not None
                and "-required" in _config.EVERGREEN_VARIANT_NAME
                and not _config.EVERGREEN_PATCH_BUILD):
            _config.FORCE_ARCHIVE_ALL_DATA_FILES = True

    _config.ARCHIVE_LIMIT_MB = config.pop("archive_limit_mb")
    _config.ARCHIVE_LIMIT_TESTS = config.pop("archive_limit_tests")

    # Wiredtiger options.
    _config.WT_COLL_CONFIG = config.pop("wt_coll_config")
    wt_engine_config = config.pop("wt_engine_config")
    if wt_engine_config:  # prevents fuzzed wt_engine_config from being overwritten unless user specifies it
        _config.WT_ENGINE_CONFIG = wt_engine_config
    _config.WT_INDEX_CONFIG = config.pop("wt_index_config")

    # Benchmark/Benchrun options.
    _config.BENCHMARK_FILTER = config.pop("benchmark_filter")
    _config.BENCHMARK_LIST_TESTS = config.pop("benchmark_list_tests")
    benchmark_min_time = config.pop("benchmark_min_time_secs")
    if benchmark_min_time is not None:
        _config.BENCHMARK_MIN_TIME = datetime.timedelta(
            seconds=benchmark_min_time)
    _config.BENCHMARK_REPETITIONS = config.pop("benchmark_repetitions")

    # Config Dir options.
    _config.CONFIG_DIR = config.pop("config_dir")

    # Configure evergreen task documentation
    if _config.EVERGREEN_TASK_NAME:
        task_name = utils.get_task_name_without_suffix(
            _config.EVERGREEN_TASK_NAME, _config.EVERGREEN_VARIANT_NAME)
        evg_task_doc_file = os.path.join(_config.CONFIG_DIR, "evg_task_doc",
                                         "evg_task_doc.yml")
        if os.path.exists(evg_task_doc_file):
            evg_task_doc = utils.load_yaml_file(evg_task_doc_file)
            if task_name in evg_task_doc:
                _config.EVERGREEN_TASK_DOC = evg_task_doc[task_name]

    _config.UNDO_RECORDER_PATH = config.pop("undo_recorder_path")

    # Populate the named suites by scanning config_dir/suites
    named_suites = {}

    def configure_tests(test_files, replay_file):
        # `_validate_options` has asserted that at most one of `test_files` and `replay_file` contains input.

        to_replay = None
        # Treat `resmoke run @to_replay` as `resmoke run --replayFile to_replay`
        if len(test_files) == 1 and test_files[0].startswith("@"):
            to_replay = test_files[0][1:]
        elif replay_file:
            to_replay = replay_file

        if to_replay:
            # The replay file is expected to be one file per line, but cope with extra whitespace.
            with open(to_replay) as fd:
                _config.TEST_FILES = fd.read().split()
        else:
            _config.TEST_FILES = test_files

    configure_tests(config.pop("test_files"), config.pop("replay_file"))

    suites_dir = os.path.join(_config.CONFIG_DIR, "suites")
    root = os.path.abspath(suites_dir)
    files = os.listdir(root)
    for filename in files:
        (short_name, ext) = os.path.splitext(filename)
        if ext in (".yml", ".yaml"):
            pathname = os.path.join(root, filename)
            named_suites[short_name] = pathname

    _config.NAMED_SUITES = named_suites

    _config.LOGGER_DIR = os.path.join(_config.CONFIG_DIR, "loggers")

    shuffle = config.pop("shuffle")
    if shuffle == "auto":
        # If the user specified a value for --jobs > 1 (or -j > 1), then default to randomize
        # the order in which tests are executed. This is because with multiple threads the tests
        # wouldn't run in a deterministic order anyway.
        _config.SHUFFLE = _config.JOBS > 1
    else:
        _config.SHUFFLE = shuffle == "on"

    conn_string = config.pop("shell_conn_string")
    port = config.pop("shell_port")

    if port is not None:
        conn_string = "mongodb://localhost:" + port

    if conn_string is not None:
        # The --shellConnString command line option must be a MongoDB connection URI, which means it
        # must specify the mongodb:// or mongodb+srv:// URI scheme. pymongo.uri_parser.parse_uri()
        # raises an exception if the connection string specified isn't considered a valid MongoDB
        # connection URI.
        pymongo.uri_parser.parse_uri(conn_string)
        _config.SHELL_CONN_STRING = conn_string

    _config.LOGGER_FILE = config.pop("logger_file")

    if config:
        raise ValueError(f"Unknown option(s): {list(config.keys())}")
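To illustrate the `_merge_set_params` helper above, here is a standalone sketch that assumes `utils.load_yaml` and `utils.dump_yaml` are thin wrappers around PyYAML; later parameters win on key collisions:

import yaml  # PyYAML; assumed to back utils.load_yaml / utils.dump_yaml


def merge_set_params(param_list):
    """Merge repeated set-parameter YAML strings into one YAML document; later values win."""
    merged = {}
    for set_param in param_list:
        merged.update(yaml.safe_load(set_param))
    return yaml.safe_dump(merged, default_flow_style=True).strip()


# e.g. two set-parameter options supplied on the command line:
print(merge_set_params(["{enableTestCommands: 1}", "{logComponentVerbosity: {network: 2}}"]))
# -> {enableTestCommands: 1, logComponentVerbosity: {network: 2}}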
Example #12
def _update_config_vars(values):  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    """Update the variables of the config module."""

    config = _config.DEFAULTS.copy()

    # Override `config` with values from command line arguments.
    cmdline_vars = vars(values)
    for cmdline_key in cmdline_vars:
        if cmdline_key not in _config.DEFAULTS:
            # Ignore options that don't map to values in config.py
            continue
        if cmdline_vars[cmdline_key] is not None:
            config[cmdline_key] = cmdline_vars[cmdline_key]

    if values.command == "run" and os.path.isfile("resmoke.ini"):
        err = textwrap.dedent("""\
Support for resmoke.ini has been removed. You must delete
resmoke.ini and rerun your build to run resmoke. If only one testable
installation is present, resmoke will automatically locate that installation.
If you have multiple installations, you must either pass an explicit
--installDir argument to the run subcommand to identify the installation you
would like to test, or invoke the customized resmoke.py wrapper script staged
into the bin directory of each installation.""")
        config_parser = configparser.ConfigParser()
        config_parser.read("resmoke.ini")
        if "resmoke" in config_parser.sections():
            user_config = dict(config_parser["resmoke"])
            err += textwrap.dedent(f"""

Based on the current value of resmoke.ini, after rebuilding, resmoke.py should
be invoked as either:
- {shlex.quote(f"{user_config['install_dir']}/resmoke.py")}
- buildscripts/resmoke.py --installDir {shlex.quote(user_config['install_dir'])}"""
                                   )
        raise RuntimeError(err)

    def setup_feature_flags():
        _config.RUN_ALL_FEATURE_FLAG_TESTS = config.pop(
            "run_all_feature_flag_tests")
        _config.RUN_ALL_FEATURE_FLAGS = config.pop(
            "run_all_feature_flags_no_tests")

        # Running all feature flag tests implies running the fixtures with feature flags.
        if _config.RUN_ALL_FEATURE_FLAG_TESTS:
            _config.RUN_ALL_FEATURE_FLAGS = True

        all_ff = []
        enabled_feature_flags = []
        try:
            with open(ALL_FEATURE_FLAG_FILE) as fd:
                all_ff = fd.read().split()
        except FileNotFoundError:
            # If we ask resmoke to run with all feature flags, the feature flags file
            # needs to exist.
            if _config.RUN_ALL_FEATURE_FLAGS:
                raise

        if _config.RUN_ALL_FEATURE_FLAGS:
            enabled_feature_flags = all_ff[:]

        # Specify additional feature flags from the command line.
        # Set running all feature flag tests to True if this option is specified.
        additional_feature_flags = _tags_from_list(
            config.pop("additional_feature_flags"))
        if additional_feature_flags is not None:
            enabled_feature_flags.extend(additional_feature_flags)

        return enabled_feature_flags, all_ff

    _config.ENABLED_FEATURE_FLAGS, all_feature_flags = setup_feature_flags()
    not_enabled_feature_flags = list(
        set(all_feature_flags) - set(_config.ENABLED_FEATURE_FLAGS))

    _config.ALWAYS_USE_LOG_FILES = config.pop("always_use_log_files")
    _config.BASE_PORT = int(config.pop("base_port"))
    _config.BACKUP_ON_RESTART_DIR = config.pop("backup_on_restart_dir")
    _config.BUILDLOGGER_URL = config.pop("buildlogger_url")
    _config.DBPATH_PREFIX = _expand_user(config.pop("dbpath_prefix"))
    _config.DRY_RUN = config.pop("dry_run")

    # EXCLUDE_WITH_ANY_TAGS will always contain the implicitly defined EXCLUDED_TAG.
    _config.EXCLUDE_WITH_ANY_TAGS = [_config.EXCLUDED_TAG]
    _config.EXCLUDE_WITH_ANY_TAGS.extend(
        utils.default_if_none(
            _tags_from_list(config.pop("exclude_with_any_tags")), []))

    if _config.RUN_ALL_FEATURE_FLAGS and not _config.RUN_ALL_FEATURE_FLAG_TESTS:
        # Don't run any feature flag tests.
        _config.EXCLUDE_WITH_ANY_TAGS.extend(all_feature_flags)
    else:
        # Don't run tests with feature flags that are not enabled.
        _config.EXCLUDE_WITH_ANY_TAGS.extend(not_enabled_feature_flags)

    _config.FAIL_FAST = not config.pop("continue_on_failure")
    _config.FLOW_CONTROL = config.pop("flow_control")
    _config.FLOW_CONTROL_TICKETS = config.pop("flow_control_tickets")

    _config.INCLUDE_WITH_ANY_TAGS = _tags_from_list(
        config.pop("include_with_any_tags"))
    _config.INCLUDE_TAGS = _tags_from_list(config.pop("include_with_all_tags"))

    _config.GENNY_EXECUTABLE = _expand_user(config.pop("genny_executable"))
    _config.JOBS = config.pop("jobs")
    _config.LINEAR_CHAIN = config.pop("linear_chain") == "on"
    _config.MAJORITY_READ_CONCERN = config.pop("majority_read_concern") == "on"
    _config.MIXED_BIN_VERSIONS = config.pop("mixed_bin_versions")
    if _config.MIXED_BIN_VERSIONS is not None:
        _config.MIXED_BIN_VERSIONS = _config.MIXED_BIN_VERSIONS.split("-")

    _config.MULTIVERSION_BIN_VERSION = config.pop("old_bin_version")

    _config.INSTALL_DIR = config.pop("install_dir")
    if values.command == "run" and _config.INSTALL_DIR is None:
        resmoke_wrappers = _find_resmoke_wrappers()
        if len(resmoke_wrappers) == 1:
            _config.INSTALL_DIR = os.path.dirname(resmoke_wrappers[0])
        elif len(resmoke_wrappers) > 1:
            err = textwrap.dedent(f"""\
Multiple testable installations were found, but installDir was not specified.
You must either call resmoke via one of the following scripts:
{os.linesep.join(map(shlex.quote, resmoke_wrappers))}

or explicitly pass --installDir to the run subcommand of buildscripts/resmoke.py."""
                                  )
            raise RuntimeError(err)
    if _config.INSTALL_DIR is not None:
        # Normalize the path so that on Windows dist-test/bin
        # translates to .\dist-test\bin then absolutify it since the
        # Windows PATH variable requires absolute paths.
        _config.INSTALL_DIR = os.path.abspath(
            _expand_user(os.path.normpath(_config.INSTALL_DIR)))

        for binary in ["mongo", "mongod", "mongos", "dbtest"]:
            keyname = binary + "_executable"
            if config.get(keyname, None) is None:
                config[keyname] = os.path.join(_config.INSTALL_DIR, binary)

    _config.DBTEST_EXECUTABLE = _expand_user(config.pop("dbtest_executable"))
    _config.MONGO_EXECUTABLE = _expand_user(config.pop("mongo_executable"))

    def _merge_set_params(param_list):
        ret = {}
        for set_param in param_list:
            ret.update(utils.load_yaml(set_param))
        return utils.dump_yaml(ret)

    _config.MONGOD_EXECUTABLE = _expand_user(config.pop("mongod_executable"))

    mongod_set_parameters = config.pop("mongod_set_parameters")

    _config.MONGOD_SET_PARAMETERS = _merge_set_params(mongod_set_parameters)
    _config.FUZZ_MONGOD_CONFIGS = config.pop("fuzz_mongod_configs")
    _config.CONFIG_FUZZ_SEED = config.pop("config_fuzz_seed")

    if _config.FUZZ_MONGOD_CONFIGS:
        if not _config.CONFIG_FUZZ_SEED:
            _config.CONFIG_FUZZ_SEED = random.randrange(sys.maxsize)
        else:
            _config.CONFIG_FUZZ_SEED = int(_config.CONFIG_FUZZ_SEED)
        _config.MONGOD_SET_PARAMETERS, _config.WT_ENGINE_CONFIG, _config.WT_COLL_CONFIG, \
        _config.WT_INDEX_CONFIG = mongod_fuzzer_configs.fuzz_set_parameters(
            _config.CONFIG_FUZZ_SEED, _config.MONGOD_SET_PARAMETERS)

    _config.MONGOS_EXECUTABLE = _expand_user(config.pop("mongos_executable"))
    mongos_set_parameters = config.pop("mongos_set_parameters")
    _config.MONGOS_SET_PARAMETERS = _merge_set_params(mongos_set_parameters)

    _config.MONGOCRYPTD_SET_PARAMETERS = _merge_set_params(
        config.pop("mongocryptd_set_parameters"))

    _config.MRLOG = config.pop("mrlog")
    _config.NO_JOURNAL = config.pop("no_journal")
    _config.NUM_CLIENTS_PER_FIXTURE = config.pop("num_clients_per_fixture")
    _config.NUM_REPLSET_NODES = config.pop("num_replset_nodes")
    _config.NUM_SHARDS = config.pop("num_shards")
    _config.PERF_REPORT_FILE = config.pop("perf_report_file")
    _config.CEDAR_REPORT_FILE = config.pop("cedar_report_file")
    _config.RANDOM_SEED = config.pop("seed")
    _config.REPEAT_SUITES = config.pop("repeat_suites")
    _config.REPEAT_TESTS = config.pop("repeat_tests")
    _config.REPEAT_TESTS_MAX = config.pop("repeat_tests_max")
    _config.REPEAT_TESTS_MIN = config.pop("repeat_tests_min")
    _config.REPEAT_TESTS_SECS = config.pop("repeat_tests_secs")
    _config.REPORT_FAILURE_STATUS = config.pop("report_failure_status")
    _config.REPORT_FILE = config.pop("report_file")
    _config.SERVICE_EXECUTOR = config.pop("service_executor")
    _config.EXPORT_MONGOD_CONFIG = config.pop("export_mongod_config")
    _config.STAGGER_JOBS = config.pop("stagger_jobs") == "on"
    _config.STORAGE_ENGINE = config.pop("storage_engine")
    _config.STORAGE_ENGINE_CACHE_SIZE = config.pop(
        "storage_engine_cache_size_gb")
    _config.SUITE_FILES = config.pop("suite_files")
    if _config.SUITE_FILES is not None:
        _config.SUITE_FILES = _config.SUITE_FILES.split(",")
    _config.TAG_FILES = config.pop("tag_files")
    _config.TRANSPORT_LAYER = config.pop("transport_layer")
    _config.USER_FRIENDLY_OUTPUT = config.pop("user_friendly_output")

    # Internal testing options.
    _config.INTERNAL_PARAMS = config.pop("internal_params")

    # Evergreen options.
    _config.EVERGREEN_URL = config.pop("evergreen_url")
    _config.EVERGREEN_BUILD_ID = config.pop("build_id")
    _config.EVERGREEN_DISTRO_ID = config.pop("distro_id")
    _config.EVERGREEN_EXECUTION = config.pop("execution_number")
    _config.EVERGREEN_PATCH_BUILD = config.pop("patch_build")
    _config.EVERGREEN_PROJECT_NAME = config.pop("project_name")
    _config.EVERGREEN_REVISION = config.pop("git_revision")
    _config.EVERGREEN_REVISION_ORDER_ID = config.pop("revision_order_id")
    _config.EVERGREEN_TASK_ID = config.pop("task_id")
    _config.EVERGREEN_TASK_NAME = config.pop("task_name")
    _config.EVERGREEN_TASK_DOC = config.pop("task_doc")
    _config.EVERGREEN_VARIANT_NAME = config.pop("variant_name")
    _config.EVERGREEN_VERSION_ID = config.pop("version_id")

    # Archival options. Archival is enabled only when running on evergreen.
    if not _config.EVERGREEN_TASK_ID:
        _config.ARCHIVE_FILE = None
    else:
        # Enable archival globally for all mainline variants.
        if _config.EVERGREEN_VARIANT_NAME is not None and not _config.EVERGREEN_PATCH_BUILD:
            _config.FORCE_ARCHIVE_ALL_DATA_FILES = True

    _config.ARCHIVE_LIMIT_MB = config.pop("archive_limit_mb")
    _config.ARCHIVE_LIMIT_TESTS = config.pop("archive_limit_tests")

    # Wiredtiger options. Prevent fuzzed wt configs from being overwritten unless user specifies it.
    wt_engine_config = config.pop("wt_engine_config")
    if wt_engine_config:
        _config.WT_ENGINE_CONFIG = wt_engine_config
    wt_coll_config = config.pop("wt_coll_config")
    if wt_coll_config:
        _config.WT_COLL_CONFIG = wt_coll_config
    wt_index_config = config.pop("wt_index_config")
    if wt_index_config:
        _config.WT_INDEX_CONFIG = wt_index_config

    # Benchmark/Benchrun options.
    _config.BENCHMARK_FILTER = config.pop("benchmark_filter")
    _config.BENCHMARK_LIST_TESTS = config.pop("benchmark_list_tests")
    benchmark_min_time = config.pop("benchmark_min_time_secs")
    if benchmark_min_time is not None:
        _config.BENCHMARK_MIN_TIME = datetime.timedelta(
            seconds=benchmark_min_time)
    _config.BENCHMARK_REPETITIONS = config.pop("benchmark_repetitions")

    # Config Dir options.
    _config.CONFIG_DIR = config.pop("config_dir")

    # Configure evergreen task documentation
    if _config.EVERGREEN_TASK_NAME:
        task_name = utils.get_task_name_without_suffix(
            _config.EVERGREEN_TASK_NAME, _config.EVERGREEN_VARIANT_NAME)
        evg_task_doc_file = os.path.join(_config.CONFIG_DIR, "evg_task_doc",
                                         "evg_task_doc.yml")
        if os.path.exists(evg_task_doc_file):
            evg_task_doc = utils.load_yaml_file(evg_task_doc_file)
            if task_name in evg_task_doc:
                _config.EVERGREEN_TASK_DOC = evg_task_doc[task_name]

    _config.UNDO_RECORDER_PATH = config.pop("undo_recorder_path")

    _config.EXCLUDE_TAGS_FILE_PATH = config.pop("exclude_tags_file_path")

    _config.MAX_TEST_QUEUE_SIZE = config.pop("max_test_queue_size")

    def configure_tests(test_files, replay_file):
        # `_validate_options` has asserted that at most one of `test_files` and `replay_file` contains input.

        to_replay = None
        # Treat `resmoke run @to_replay` as `resmoke run --replayFile to_replay`
        if len(test_files) == 1 and test_files[0].startswith("@"):
            to_replay = test_files[0][1:]
        elif replay_file:
            to_replay = replay_file

        if to_replay:
            # The replay file is expected to be one file per line, but cope with extra whitespace.
            with open(to_replay) as fd:
                _config.TEST_FILES = fd.read().split()
        else:
            _config.TEST_FILES = test_files

    configure_tests(config.pop("test_files"), config.pop("replay_file"))

    _config.LOGGER_DIR = os.path.join(_config.CONFIG_DIR, "loggers")

    shuffle = config.pop("shuffle")
    if shuffle == "auto":
        # If the user specified a value for --jobs > 1 (or -j > 1), then default to randomize
        # the order in which tests are executed. This is because with multiple threads the tests
        # wouldn't run in a deterministic order anyway.
        _config.SHUFFLE = _config.JOBS > 1
    else:
        _config.SHUFFLE = shuffle == "on"

    conn_string = config.pop("shell_conn_string")
    port = config.pop("shell_port")

    if port is not None:
        conn_string = "mongodb://localhost:" + port

    if conn_string is not None:
        # The --shellConnString command line option must be a MongoDB connection URI, which means it
        # must specify the mongodb:// or mongodb+srv:// URI scheme. pymongo.uri_parser.parse_uri()
        # raises an exception if the connection string specified isn't considered a valid MongoDB
        # connection URI.
        pymongo.uri_parser.parse_uri(conn_string)
        _config.SHELL_CONN_STRING = conn_string

    _config.LOGGER_FILE = config.pop("logger_file")

    if config:
        raise ValueError(f"Unknown option(s): {list(config.keys())}")