def test_apply_product_config_unknown_product():
    """A keyed option with no entry for the configured `cot_product` must
    make `apply_product_config` raise `ConfigError`.
    """
    keyed_option = {
        'by-cot-product': {
            'thunderbird': 'expected',
            'firefox': 'unexpected',
        }
    }
    bad_config = {
        'cot_product': 'seamonkey',
        'keyed': keyed_option,
    }
    with pytest.raises(config.ConfigError):
        config.apply_product_config(bad_config)
def build_config(override, basedir):
    """Build an integration-test scriptworker config.

    Starts from ``DEFAULT_CONFIG``, roots the log/artifact/work dirs under
    ``basedir``, names a throwaway worker with a random suffix, applies
    ``override`` when it is a dict, dumps the result to ``basedir/config.json``,
    and resolves ``by-cot-product`` keyed options.

    Args:
        override: dict of config values to merge on top of the defaults;
            any non-dict value skips the override step.
        basedir: directory under which the working directories are created.

    Returns:
        tuple: ``(config, creds)`` — the resolved config dict and the
        integration credentials read from disk.
    """
    randstring = slugid.nice()[0:6]
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    ED25519_DIR = os.path.join(os.path.dirname(__file__), "data", "ed25519")
    config.update({
        'log_dir': os.path.join(basedir, "log"),
        'artifact_dir': os.path.join(basedir, "artifact"),
        'task_log_dir': os.path.join(basedir, "artifact", "public", "logs"),
        'work_dir': os.path.join(basedir, "work"),
        "worker_type": "dummy-worker-{}".format(randstring),
        "worker_id": "dummy-worker-{}".format(randstring),
        'artifact_upload_timeout': 60 * 2,
        'poll_interval': 5,
        'reclaim_interval': 5,
        'task_script': ('bash', '-c', '>&2 echo bar && echo foo && sleep 9 && exit 1'),
        'task_max_timeout': 60,
        'cot_product': 'firefox',
        'ed25519_private_key_path': os.path.join(ED25519_DIR, 'scriptworker_private_key'),
        'ed25519_public_keys': {
            'docker-worker': ['8dBv4bbnZ3RsDzQiPKTJ18uo3hq5Rjm94JG6HXzAcBM='],
            'generic-worker': ['PkI5NslA78wSsYaKNzKq7iD7MLQy7W6wYO/0WFd4tWM='],
            'scriptworker': ['KxYrV3XAJ3uOyAUX0Wcl1Oeu6GSMrI/5hOn39q8Lf0I='],
        },
    })
    creds = read_integration_creds()
    # `del` is a statement, not a function — no parentheses.
    del config['credentials']
    if isinstance(override, dict):
        config.update(override)
    with open(os.path.join(basedir, "config.json"), "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config, creds
async def build_config(override, basedir):
    """Build a test config rooted under ``basedir`` and return it.

    Applies ``override`` (when it is a dict), writes the merged config to
    ``basedir/config.json``, resolves keyed options, then rejects any
    leftover ``"..."`` placeholder values.
    """
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    test_overrides = {
        "log_dir": os.path.join(basedir, "log"),
        "artifact_dir": os.path.join(basedir, "artifact"),
        "task_log_dir": os.path.join(basedir, "artifact", "public", "logs"),
        "work_dir": os.path.join(basedir, "work"),
        "ed25519_private_key_path": "",
        "github_oauth_token": await _get_github_token(),
    }
    config.update(test_overrides)
    del config["credentials"]
    if isinstance(override, dict):
        config.update(override)
    config_path = os.path.join(basedir, "config.json")
    with open(config_path, "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    # Avoid creating a `...` directory
    for k, v in config.items():
        if v == "...":
            raise Exception(
                f"Let's not keep any '...' config values. {k} is {v}!")
    return config
def test_apply_product_config_unknown_product():
    """`apply_product_config` raises `ConfigError` when a keyed option has no
    entry for the config's `cot_product`.
    """
    keyed = {"by-cot-product": {"thunderbird": "expected", "firefox": "unexpected"}}
    bad_config = {"cot_product": "seamonkey", "keyed": keyed}
    with pytest.raises(config.ConfigError):
        config.apply_product_config(bad_config)
def build_config(override, basedir):
    """Build an integration-test scriptworker config (gpg-enabled variant).

    Starts from ``DEFAULT_CONFIG``, roots working dirs under ``basedir``,
    points gpg settings at the checked-in test keyrings, applies ``override``
    when it is a dict, writes ``basedir/config.json``, and resolves
    ``by-cot-product`` keyed options.

    Args:
        override: dict of config values to merge over the defaults; any
            non-dict value skips the override step.
        basedir: directory under which the working directories are created.

    Returns:
        tuple: ``(config, creds)`` — resolved config and integration creds.
    """
    randstring = slugid.nice()[0:6]
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    GPG_HOME = os.path.join(os.path.dirname(__file__), "data", "gpg")
    ED25519_DIR = os.path.join(os.path.dirname(__file__), "data", "ed25519")
    config.update({
        'log_dir': os.path.join(basedir, "log"),
        'artifact_dir': os.path.join(basedir, "artifact"),
        'task_log_dir': os.path.join(basedir, "artifact", "public", "logs"),
        'work_dir': os.path.join(basedir, "work"),
        "worker_type": "dummy-worker-{}".format(randstring),
        "worker_id": "dummy-worker-{}".format(randstring),
        'artifact_upload_timeout': 60 * 2,
        'gpg_home': GPG_HOME,
        "gpg_encoding": 'utf-8',
        "gpg_options": None,
        "gpg_path": os.environ.get("GPG_PATH", None),
        "gpg_public_keyring": os.path.join(GPG_HOME, "pubring.gpg"),
        "gpg_secret_keyring": os.path.join(GPG_HOME, "secring.gpg"),
        "gpg_use_agent": None,
        'poll_interval': 5,
        'reclaim_interval': 5,
        'task_script': ('bash', '-c', '>&2 echo bar && echo foo && sleep 9 && exit 1'),
        'task_max_timeout': 60,
        'cot_product': 'firefox',
        'ed25519_private_key_path': os.path.join(ED25519_DIR, 'scriptworker_private_key'),
        'ed25519_public_keys': {
            'docker-worker': ['8dBv4bbnZ3RsDzQiPKTJ18uo3hq5Rjm94JG6HXzAcBM='],
            'generic-worker': ['PkI5NslA78wSsYaKNzKq7iD7MLQy7W6wYO/0WFd4tWM='],
            'scriptworker': ['KxYrV3XAJ3uOyAUX0Wcl1Oeu6GSMrI/5hOn39q8Lf0I='],
        },
    })
    creds = read_integration_creds()
    # `del` is a statement, not a function — no parentheses.
    del config['credentials']
    if isinstance(override, dict):
        config.update(override)
    with open(os.path.join(basedir, "config.json"), "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config, creds
def build_config(override, basedir):
    """Build a minimal test config from ``DEFAULT_CONFIG``.

    Drops credentials, applies ``override`` when it is a dict, writes the
    merged config to ``basedir/config.json``, and resolves keyed options.

    Args:
        override: dict of config values to merge over the defaults; any
            non-dict value skips the override step.
        basedir: directory where ``config.json`` is written.

    Returns:
        dict: the resolved config.
    """
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    # `del` is a statement, not a function — no parentheses.
    del config['credentials']
    if isinstance(override, dict):
        config.update(override)
    with open(os.path.join(basedir, "config.json"), "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config
def build_config(override, basedir):
    """Build an integration-test scriptworker config (gpg variant).

    Roots working dirs under ``basedir``, points gpg settings at the
    checked-in test keyrings, applies ``override`` when it is a dict,
    writes ``basedir/config.json``, and resolves keyed options.

    Args:
        override: dict of config values to merge over the defaults; any
            non-dict value skips the override step.
        basedir: directory under which the working directories are created.

    Returns:
        tuple: ``(config, creds)`` — resolved config and integration creds.
    """
    randstring = slugid.nice()[0:6].decode('utf-8')
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    # Bugfix: was os.path.basename(__file__), which yields the *filename* of
    # this module, not its directory — the joined gpg path never existed.
    # Sibling build_config implementations use dirname here.
    GPG_HOME = os.path.join(os.path.dirname(__file__), "data", "gpg")
    config.update({
        'log_dir': os.path.join(basedir, "log"),
        'artifact_dir': os.path.join(basedir, "artifact"),
        'task_log_dir': os.path.join(basedir, "artifact", "public", "logs"),
        'work_dir': os.path.join(basedir, "work"),
        "worker_type": "dummy-worker-{}".format(randstring),
        "worker_id": "dummy-worker-{}".format(randstring),
        'artifact_upload_timeout': 60 * 2,
        'gpg_home': GPG_HOME,
        "gpg_encoding": 'utf-8',
        "gpg_options": None,
        "gpg_path": os.environ.get("GPG_PATH", None),
        "gpg_public_keyring": os.path.join(GPG_HOME, "pubring.gpg"),
        "gpg_secret_keyring": os.path.join(GPG_HOME, "secring.gpg"),
        "gpg_use_agent": None,
        'poll_interval': 5,
        'reclaim_interval': 5,
        'task_script': ('bash', '-c', '>&2 echo bar && echo foo && sleep 9 && exit 1'),
        'task_max_timeout': 60,
        'cot_product': 'firefox'
    })
    creds = read_integration_creds()
    # `del` is a statement, not a function — no parentheses.
    del config['credentials']
    if isinstance(override, dict):
        config.update(override)
    with open(os.path.join(basedir, "config.json"), "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config, creds
def test_get_context_from_cmdln(t_config):
    """`get_context_from_cmdln` builds a context and credentials matching the
    config file it is pointed at, with keyed options resolved and the
    credentials split out of the config.
    """
    path = os.path.join(os.path.dirname(__file__), "data", "good.json")
    c = deepcopy(dict(DEFAULT_CONFIG))
    with open(path) as fh:
        c.update(json.load(fh))
    expected_creds = frozendict(c['credentials'])
    # `del` is a statement, not a function — no parentheses.
    del c['credentials']
    expected_config = frozendict(config.apply_product_config(c))
    # Removed an unused local `noop` helper (dead code).
    context, credentials = config.get_context_from_cmdln([path])
    assert credentials == expected_creds
    assert context.config == expected_config
def test_get_context_from_cmdln(t_config):
    """`get_context_from_cmdln` returns a context/credentials pair matching
    the on-disk config, with credentials separated from the config proper.
    """
    path = os.path.join(os.path.dirname(__file__), "data", "good.json")
    c = deepcopy(dict(DEFAULT_CONFIG))
    with open(path) as fh:
        c.update(json.load(fh))
    expected_creds = frozendict(c['credentials'])
    # `del` is a statement, not a function — no parentheses.
    del c['credentials']
    expected_config = frozendict(config.apply_product_config(c))
    # Removed an unused local `noop` helper (dead code).
    context, credentials = config.get_context_from_cmdln([path])
    assert credentials == expected_creds
    assert context.config == expected_config
def _craft_rw_context(tmp, event_loop, cot_product, session):
    """Create a read/write test Context for the given `cot_product`.

    All ``*_dir`` config entries are redirected (and created) under ``tmp``;
    ``*key_path`` entries are pointed under ``tmp`` without being created.

    Args:
        tmp: temp directory to root dirs and key paths under.
        event_loop: event loop to attach to the context.
        cot_product: product name used to resolve keyed config options.
        session: aiohttp-style session to attach to the context.

    Returns:
        Context: the configured context.
    """
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    config['cot_product'] = cot_product
    context = Context()
    context.session = session
    context.config = apply_product_config(config)
    context.config['cot_job_type'] = "signing"
    # Iterate keys only — the values were unused, and a snapshot via list()
    # makes the in-loop reassignment obviously safe.
    for key in list(context.config):
        if key.endswith("_dir"):
            context.config[key] = os.path.join(tmp, key)
            makedirs(context.config[key])
        if key.endswith("key_path"):
            context.config[key] = os.path.join(tmp, key)
    context.config['verbose'] = VERBOSE
    context.event_loop = event_loop
    return context
def build_config(override, basedir):
    """Return ``DEFAULT_CONFIG`` with its base directories rooted under
    ``basedir``, ``override`` applied (when a dict), the result written to
    ``basedir/config.json``, and keyed options resolved.
    """
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    dir_overrides = {
        "log_dir": os.path.join(basedir, "log"),
        "base_artifact_dir": os.path.join(basedir, "artifact"),
        "task_log_dir_template": os.path.join(basedir, "artifact", "public", "logs"),
        "base_work_dir": os.path.join(basedir, "work"),
    }
    config.update(dir_overrides)
    del config["credentials"]
    if isinstance(override, dict):
        config.update(override)
    config_path = os.path.join(basedir, "config.json")
    with open(config_path, "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config
def build_config(override, basedir):
    """Assemble an integration-test config and return it with the test creds.

    Roots working dirs under ``basedir``, names a throwaway worker with a
    random suffix, applies ``override`` when it is a dict, dumps the config
    to ``basedir/config.json``, and resolves keyed options.
    """
    randstring = slugid.nice().lower().replace("_", "").replace("-", "")[:6]
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    ED25519_DIR = os.path.join(os.path.dirname(__file__), "data", "ed25519")
    worker_name = "dummy-worker-{}".format(randstring)
    test_overrides = {
        "log_dir": os.path.join(basedir, "log"),
        "artifact_dir": os.path.join(basedir, "artifact"),
        "task_log_dir": os.path.join(basedir, "artifact", "public", "logs"),
        "work_dir": os.path.join(basedir, "work"),
        "worker_type": worker_name,
        "worker_id": worker_name,
        "artifact_upload_timeout": 60 * 2,
        "poll_interval": 5,
        "reclaim_interval": 5,
        "task_script": ("bash", "-c", ">&2 echo bar && echo foo && sleep 9 && exit 1"),
        "task_max_timeout": 60,
        "cot_product": "firefox",
        "ed25519_private_key_path": os.path.join(ED25519_DIR, "scriptworker_private_key"),
        "ed25519_public_keys": {
            "docker-worker": ["8dBv4bbnZ3RsDzQiPKTJ18uo3hq5Rjm94JG6HXzAcBM="],
            "generic-worker": ["PkI5NslA78wSsYaKNzKq7iD7MLQy7W6wYO/0WFd4tWM="],
            "scriptworker": ["KxYrV3XAJ3uOyAUX0Wcl1Oeu6GSMrI/5hOn39q8Lf0I="],
        },
    }
    config.update(test_overrides)
    creds = read_integration_creds()
    del config["credentials"]
    if isinstance(override, dict):
        config.update(override)
    config_path = os.path.join(basedir, "config.json")
    with open(config_path, "w") as fh:
        json.dump(config, fh, indent=2, sort_keys=True)
    config = apply_product_config(config)
    return config, creds
def _craft_rw_context(tmp, event_loop, cot_product, session, private=False):
    """Create a read/write test Context for the given `cot_product`.

    All ``*_dir`` config entries are redirected (and created) under ``tmp``;
    ``*key_path`` entries are pointed under ``tmp`` without being created.

    Args:
        tmp: temp directory to root dirs and key paths under.
        event_loop: event loop to attach to the context.
        cot_product: product name used to resolve keyed config options.
        session: aiohttp-style session to attach to the context.
        private: when True, mark every trusted VCS rule as requiring a
            secret (simulates a private repo setup). Defaults to False.

    Returns:
        Context: the configured context.
    """
    config = get_unfrozen_copy(DEFAULT_CONFIG)
    config["cot_product"] = cot_product
    context = Context()
    context.session = session
    context.config = apply_product_config(config)
    context.config["cot_job_type"] = "scriptworker"
    # Iterate keys only — the values were unused, and a snapshot via list()
    # makes the in-loop reassignment obviously safe.
    for key in list(context.config):
        if key.endswith("_dir"):
            context.config[key] = os.path.join(tmp, key)
            makedirs(context.config[key])
        if key.endswith("key_path"):
            context.config[key] = os.path.join(tmp, key)
    if private:
        for rule in context.config["trusted_vcs_rules"]:
            rule["require_secret"] = True
    context.config["verbose"] = VERBOSE
    context.event_loop = event_loop
    return context
async def rw_context(request, event_loop):
    """Async fixture yielding a read/write test Context for the product named
    by ``request.param``.

    Roots all ``*_dir`` entries (created) plus ``*key_path`` and ``gpg_home``
    entries (not created) under a temp directory, yields the context, then
    closes the context's sessions on teardown.
    """
    with tempfile.TemporaryDirectory() as tmp:
        config = get_unfrozen_copy(DEFAULT_CONFIG)
        config['cot_product'] = request.param
        context = Context()
        context.config = apply_product_config(config)
        context.config['gpg_lockfile'] = os.path.join(tmp, 'gpg_lockfile')
        context.config['cot_job_type'] = "signing"
        # Iterate keys only — the values were unused, and a snapshot via
        # list() makes the in-loop reassignment obviously safe.
        for key in list(context.config):
            if key.endswith("_dir"):
                context.config[key] = os.path.join(tmp, key)
                makedirs(context.config[key])
            if key.endswith("key_path") or key in ("gpg_home", ):
                context.config[key] = os.path.join(tmp, key)
        context.config['verbose'] = VERBOSE
        context.event_loop = event_loop
        yield context
        await _close_session(context)
        await _close_session(context.queue)
        await _close_session(context.temp_queue)
def test_apply_product_config():
    """`apply_product_config` replaces each `by-cot-product` keyed entry with
    the value matching the config's `cot_product`, leaving other entries
    untouched.
    """
    keyed = {"by-cot-product": {"thunderbird": "expected", "firefox": "unexpected"}}
    raw_config = {
        "cot_product": "thunderbird",
        "unkeyed": "no keys",
        "keyed": keyed,
    }
    resolved = config.apply_product_config(raw_config)
    assert resolved == {
        "cot_product": "thunderbird",
        "unkeyed": "no keys",
        "keyed": "expected"
    }
def test_apply_product_config():
    """`apply_product_config` resolves each `by-cot-product` keyed entry to
    the value for the config's `cot_product`; unkeyed entries pass through.
    """
    keyed_option = {
        'by-cot-product': {
            'thunderbird': 'expected',
            'firefox': 'unexpected',
        }
    }
    raw_config = {
        'cot_product': 'thunderbird',
        'unkeyed': 'no keys',
        'keyed': keyed_option,
    }
    resolved = config.apply_product_config(raw_config)
    assert resolved == {
        'cot_product': 'thunderbird',
        'unkeyed': 'no keys',
        'keyed': 'expected',
    }
def t_config():
    """Return a deep-copied `DEFAULT_CONFIG` with product keyed options resolved."""
    base = dict(deepcopy(DEFAULT_CONFIG))
    return config.apply_product_config(base)