def _find_target_path(taskcluster_path, artifact_map):
    """Return the single destination for ``taskcluster_path`` in ``artifact_map``.

    Args:
        taskcluster_path (str): the taskcluster artifact path to look up.
        artifact_map (list): list of map entries, each containing a "paths"
            dict mapping taskcluster paths to a dict with a "destinations" list.

    Raises:
        TaskVerificationError: if the path has no destination, has more than
            one destination, appears in more than one map entry, or is not
            present in the map at all.

    Returns:
        The single destination defined for the path.
    """
    target_path = None
    for map_ in artifact_map:
        if taskcluster_path in map_["paths"]:
            destinations = map_["paths"][taskcluster_path]["destinations"]
            candidate_destination = get_single_item_from_sequence(
                sequence=destinations,
                condition=lambda _: True,
                ErrorClass=TaskVerificationError,
                no_item_error_message=f'Path "{taskcluster_path}" has no destination defined',
                too_many_item_error_message=f'Path "{taskcluster_path}" has too many destinations',
            )
            if target_path is not None:
                # Bug fix: the second sentence was a plain (non-f) string, so
                # "{target_path}"/"{candidate_destination}" were emitted
                # literally instead of being interpolated.
                raise TaskVerificationError(
                    f'Path "{taskcluster_path}" was already defined elsewhere in `artifactMap`. '
                    f"Previous value: {target_path}. New value: {candidate_destination}"
                )
            target_path = candidate_destination

    if target_path is None:
        raise TaskVerificationError(f'Path "{taskcluster_path}" is not present in artifactMap')

    return target_path
def get_release_config(product_config, task_payload, config):
    """Build the release configuration dict for a GitHub release.

    The repository owner/name come from the task payload when the product
    allows a GitHub repo override (xpi-manifest workflow); otherwise they
    come from the product config (original fenix workflow).

    Raises:
        TaskVerificationError: if the required owner/repo fields are missing
            from the task payload or the product config.
    """
    if product_config.get("allow_github_repo_override", False):
        # Override allowed: owner/repo must be supplied by the task itself.
        for required in ("githubOwner", "githubRepoName"):
            if not task_payload.get(required, ""):
                raise TaskVerificationError(f"missing {required} from task")
        owner = task_payload["githubOwner"]
        repo_name = task_payload["githubRepoName"]
    else:
        # No override: owner/repo must be pinned in the product config.
        for required in ("github_owner", "github_repo_name"):
            if not product_config.get(required, ""):
                raise TaskVerificationError(f"missing {required} from config")
        owner = product_config["github_owner"]
        repo_name = product_config["github_repo_name"]

    return {
        "artifacts": _get_artifacts(task_payload, config),
        "contact_github": product_config["contact_github"],
        "git_revision": task_payload["gitRevision"],
        "git_tag": task_payload["gitTag"],
        "github_owner": owner,
        "github_repo_name": repo_name,
        "github_token": product_config["github_token"],
        "is_prerelease": task_payload["isPrerelease"],
        "release_name": task_payload["releaseName"],
    }
def verify_task_schema(config, task, schema_key="schema_file"):
    """Verify the task definition.

    Args:
        config (dict): the running config
        task (dict): the running task
        schema_key: the key in `config` where the path to the schema file is.
            Key can contain dots (e.g.: 'schema_files.file_a')

    Raises:
        TaskVerificationError: if the task doesn't match the schema
    """
    try:
        # Walk the (possibly dotted) key down into the config to find the
        # schema file path, then load and apply the schema.
        schema_path = config
        for part in schema_key.split("."):
            schema_path = schema_path[part]
        task_schema = load_json_or_yaml(schema_path, is_path=True)
        log.debug("Task is verified against this schema: {}".format(task_schema))
        verify_json_schema(task, task_schema)
    except (KeyError, OSError) as e:
        raise TaskVerificationError(
            "Cannot verify task against schema. Task: {}.".format(task)
        ) from e
def verify_json_schema(data, schema, name="task"):
    """Validate ``data`` against a jsonschema.

    Used for both task definitions and chain-of-trust artifacts.

    Args:
        data (dict): the json to verify.
        schema (dict): the jsonschema to verify against.
        name (str, optional): the name of the json, for exception messages.
            Defaults to "task".

    Raises:
        TaskVerificationError: on failure
    """
    try:
        jsonschema.validate(data, schema)
    except jsonschema.exceptions.ValidationError as exc:
        # Wrap the library error in the project's exception type, preserving
        # the validation details in the message and the cause chain.
        error_message = "Can't verify {} schema!\n{}".format(name, str(exc))
        raise TaskVerificationError(error_message) from exc
def check_action_is_allowed(project_config, action):
    """Raise unless ``action`` is listed in the project's allowed actions.

    Args:
        project_config (dict): the project configuration; must contain an
            "allowed_actions" collection.
        action (str): the action to check.

    Raises:
        TaskVerificationError: if the action is not allowed.
    """
    allowed_actions = project_config["allowed_actions"]
    if action not in allowed_actions:
        raise TaskVerificationError(f'Action "{action}" is not allowed for this project')