async def build_task_dependencies(chain, task, name, my_task_id):
    """Recursively build the task dependencies of a task.

    Args:
        chain (ChainOfTrust): the chain of trust to add to.
        task (dict): the task definition to operate on.
        name (str): the name of the task to operate on.
        my_task_id (str): the taskId of the task to operate on.

    Raises:
        CoTError: on failure.
    """
    log.info("build_task_dependencies {} {}".format(name, my_task_id))
    if name.count(':') > 5:
        raise CoTError("Too deep recursion!\n{}".format(name))
    sorted_dependencies = find_sorted_task_dependencies(task, name, my_task_id)
    for task_name, task_id in sorted_dependencies:
        if task_id not in chain.dependent_task_ids():
            link = LinkOfTrust(chain.context, task_name, task_id)
            json_path = link.get_artifact_full_path('task.json')
            try:
                task_defn = await chain.context.queue.task(task_id)
                link.task = task_defn
                chain.links.append(link)
                # write task json to disk
                makedirs(os.path.dirname(json_path))
                with open(json_path, 'w') as fh:
                    fh.write(format_json(task_defn))
                await build_task_dependencies(chain, task_defn, task_name, task_id)
            except TaskclusterFailure as exc:
                raise CoTError(str(exc))
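# Illustrative note, assuming the colon-separated naming implied above
# (the example names are hypothetical): each level of recursion extends the
# task name, e.g. "decision" -> "decision:build" -> "decision:build:signing",
# so the ``name.count(':') > 5`` guard caps the recursion depth instead of
# letting a dependency cycle recurse indefinitely.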
def _set(self, prop_name, value):
    """Set ``prop_name`` to ``value``, raising ``CoTError`` if it is already set."""
    prev = getattr(self, prop_name)
    if prev is not None:
        raise CoTError(
            "LinkOfTrust {}: re-setting {} to {} when it is already set to {}!".format(
                str(self.name), prop_name, value, prev))
    return setattr(self, prop_name, value)
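# Usage sketch (hypothetical values): _set() gives LinkOfTrust attributes
# write-once semantics.  A property setter that delegates to it, e.g.
# ``self._set('_task', task)``, would raise CoTError on a second assignment
# such as ``link.task = other_defn`` once ``link.task = task_defn`` has run.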
def verify_task(self):
    """Run some task sanity checks on ``self.task``."""
    for upstream_artifact in self.task.get("payload", {}).get("upstreamArtifacts", []):
        task_id = upstream_artifact["taskId"]
        for path in upstream_artifact["paths"]:
            if os.path.isabs(path) or '..' in path:
                raise CoTError(
                    "upstreamArtifacts taskId {} has illegal path {}!".format(task_id, path))
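# A minimal standalone sketch of the path check above, under the same rules:
# absolute paths and any path containing '..' are rejected.  The helper name
# below is hypothetical and not part of the module.
def _is_illegal_artifact_path(path):
    """Return True if ``verify_task`` would reject ``path``."""
    return os.path.isabs(path) or '..' in path

#   _is_illegal_artifact_path('/etc/passwd')                  # True
#   _is_illegal_artifact_path('public/../env/secrets.json')   # True
#   _is_illegal_artifact_path('public/build/target.tar.gz')   # False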
def verify_cot_signatures(chain):
    """Verify the signatures of the chain of trust artifacts populated in ``download_cot``.

    Populate each link.cot with the chain of trust json body.

    Args:
        chain (ChainOfTrust): the chain of trust to add to.

    Raises:
        CoTError: on failure.
    """
    for link in chain.links:
        path = link.get_artifact_full_path('public/chainOfTrust.json.asc')
        gpg_home = os.path.join(chain.context.config['base_gpg_home_dir'], link.worker_impl)
        gpg = GPG(chain.context, gpg_home=gpg_home)
        log.debug(
            "Verifying the {} {} chain of trust signature against {}".format(
                link.name, link.task_id, gpg_home))
        try:
            with open(path, "r") as fh:
                contents = fh.read()
        except OSError as exc:
            raise CoTError("Can't read {}: {}!".format(path, str(exc)))
        try:
            # TODO remove verify_sig pref and kwarg when git repo pubkey
            # verification works reliably!
            body = get_body(
                gpg, contents,
                verify_sig=chain.context.config['verify_cot_signature'])
        except ScriptWorkerGPGException as exc:
            raise CoTError(
                "GPG Error verifying chain of trust for {}: {}!".format(path, str(exc)))
        link.cot = load_json(
            body, exception=CoTError,
            message="{} {}: Invalid cot json body! %(exc)s".format(link.name, link.task_id))
        unsigned_path = link.get_artifact_full_path('chainOfTrust.json')
        log.debug("Good. Writing json contents to {}".format(unsigned_path))
        with open(unsigned_path, "w") as fh:
            fh.write(format_json(link.cot))
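# Note: each signature is checked against a per-worker-implementation GPG home
# (``base_gpg_home_dir/<worker_impl>``), and the verified body is re-serialized
# to chainOfTrust.json so later steps can read ``link.cot`` from disk without
# re-running GPG.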
async def download_cot_artifact(chain, task_id, path):
    """Download an artifact and verify its SHA against the chain of trust.

    Args:
        chain (ChainOfTrust): the chain of trust object
        task_id (str): the task ID to download from
        path (str): the relative path to the artifact to download

    Returns:
        str: the full path of the downloaded artifact

    Raises:
        CoTError: on failure.
    """
    link = chain.get_link(task_id)
    log.debug("Verifying {} is in {} cot artifacts...".format(path, task_id))
    if path not in link.cot['artifacts']:
        raise CoTError("path {} not in {} {} chain of trust artifacts!".format(
            path, link.name, link.task_id))
    url = get_artifact_url(chain.context, task_id, path)
    log.info("Downloading Chain of Trust artifact:\n{}".format(url))
    await download_artifacts(
        chain.context, [url], parent_dir=link.cot_dir,
        valid_artifact_task_ids=[task_id])
    full_path = link.get_artifact_full_path(path)
    for alg, expected_sha in link.cot['artifacts'][path].items():
        if alg not in chain.context.config['valid_hash_algorithms']:
            raise CoTError("BAD HASH ALGORITHM: {}: {} {}!".format(
                link.name, alg, full_path))
        real_sha = get_hash(full_path, hash_alg=alg)
        if expected_sha != real_sha:
            raise CoTError("BAD HASH: {}: Expected {} {}; got {}!".format(
                link.name, alg, expected_sha, real_sha))
        log.debug("{} matches the expected {} {}".format(
            full_path, alg, expected_sha))
    return full_path
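# Standalone sketch of what the per-algorithm digest check boils down to,
# assuming get_hash() is a thin wrapper around hashlib and that hash_alg values
# are hashlib algorithm names such as 'sha256'.  This helper is hypothetical,
# not the module's get_hash().
import hashlib


def _sketch_file_hash(path, hash_alg='sha256'):
    """Hash ``path`` in chunks and return the hex digest."""
    h = hashlib.new(hash_alg)
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(4096), b''):
            h.update(chunk)
    return h.hexdigest()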
def raise_on_errors(errors, level=logging.CRITICAL):
    """Raise a CoTError if there are errors.

    Helper function because I had this code block everywhere.

    Args:
        errors (list): the error messages to log and raise on.
        level (int, optional): the log level to use. Defaults to logging.CRITICAL.

    Raises:
        CoTError: if ``errors`` is non-empty.
    """
    if errors:
        log.log(level, "\n".join(errors))
        raise CoTError("\n".join(errors))
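# Hypothetical usage sketch: accumulate messages, then fail once at the end,
# so a single CoTError reports every problem found.
#
#   errors = []
#   for key in ('taskId', 'payload'):
#       if key not in task:
#           errors.append("task is missing {}!".format(key))
#   raise_on_errors(errors, level=logging.WARNING)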
def get_link(self, task_id):
    """Get a ``LinkOfTrust`` by task id.

    Args:
        task_id (str): the task id to find.

    Returns:
        LinkOfTrust: the link matching the task id.

    Raises:
        CoTError: if there is not exactly one matching ``LinkOfTrust``.
    """
    links = [x for x in self.links if x.task_id == task_id]
    if len(links) != 1:
        raise CoTError("No single Link matches task_id {}!\n{}".format(
            task_id, self.dependent_task_ids()))
    return links[0]
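# Usage sketch (hypothetical task id): once build_task_dependencies() has
# populated chain.links, a link can be looked up directly by taskId:
#
#   link = chain.get_link('VALID_TASK_ID')
#   log.info("{} ran on {}".format(link.name, link.worker_impl))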
def guess_task_type(name):
    """Guess the task type of the task.

    Args:
        name (str): the name of the task.

    Returns:
        str: the task_type.

    Raises:
        CoTError: on invalid task_type.
    """
    parts = name.split(':')
    task_type = parts[-1]
    if task_type not in get_valid_task_types():
        raise CoTError("Invalid task type for {}!".format(name))
    return task_type
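# Illustrative example, assuming "decision" is one of the values returned by
# get_valid_task_types(); the task type is the final colon-separated segment:
#
#   guess_task_type("project:decision")  # -> "decision"
#   guess_task_type("project:bogus")     # -> raises CoTError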
async def verify_worker_impls(chain):
    """Verify the worker implementation (e.g. docker-worker, scriptworker) of each link in the chain.

    Args:
        chain (ChainOfTrust): the chain we're operating on

    Raises:
        CoTError: on failure
    """
    valid_worker_impls = get_valid_worker_impls()
    for obj in [chain] + chain.links:
        worker_impl = obj.worker_impl
        log.info("Verifying {} {} as a {} task...".format(
            obj.name, obj.task_id, worker_impl))
        # Run tests synchronously for now.  We can parallelize if efficiency
        # is more important than a single simple logfile.
        await valid_worker_impls[worker_impl](chain, obj)
        if isinstance(obj, ChainOfTrust) and obj.worker_impl != "scriptworker":
            raise CoTError("ChainOfTrust object is not a scriptworker impl!")
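# get_valid_worker_impls() is expected to map each worker_impl name to an
# async verification callable taking ``(chain, link_or_chain)``, which is why
# the dictionary lookup above is awaited directly.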
async def verify_chain_of_trust(chain):
    """Build and verify the chain of trust.

    Args:
        chain (ChainOfTrust): the chain we're operating on

    Raises:
        CoTError: on failure
    """
    log_path = os.path.join(chain.context.config["task_log_dir"], "chain_of_trust.log")
    with contextual_log_handler(
        chain.context, path=log_path, log_obj=log,
        formatter=AuditLogFormatter(
            fmt=chain.context.config['log_fmt'],
            datefmt=chain.context.config['log_datefmt'],
        ),
    ):
        try:
            # build LinkOfTrust objects
            await build_task_dependencies(chain, chain.task, chain.name, chain.task_id)
            # download the signed chain of trust artifacts
            await download_cot(chain)
            # verify the signatures and populate the ``link.cot``s
            verify_cot_signatures(chain)
            # download all other artifacts needed to verify chain of trust
            await download_firefox_cot_artifacts(chain)
            # verify the task types, e.g. decision
            task_count = await verify_task_types(chain)
            check_num_tasks(chain, task_count)
            # verify the worker_impls, e.g. docker-worker
            await verify_worker_impls(chain)
            await trace_back_to_firefox_tree(chain)
        except (DownloadError, KeyError, AttributeError) as exc:
            log.critical("Chain of Trust verification error!", exc_info=True)
            # CoTError subclasses KeyError, so it can land here; re-raise it
            # unchanged and wrap everything else.
            if isinstance(exc, CoTError):
                raise
            else:
                raise CoTError(str(exc))
        log.info("Good.")
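# Minimal driver sketch; the ChainOfTrust constructor arguments shown are an
# assumption, and ``context`` / ``task_id`` are placeholders for a configured
# scriptworker Context and a claimed task.
#
#   import asyncio
#   chain = ChainOfTrust(context, 'signing', task_id=task_id)
#   asyncio.get_event_loop().run_until_complete(verify_chain_of_trust(chain))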
async def die_async(*args, **kwargs):
    raise CoTError("x")
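# die_async() is a test helper: an awaitable stand-in that always raises
# CoTError, useful for patching over an async step to exercise the error
# handling in verify_chain_of_trust().  Hypothetical pytest usage, where
# ``verify_module`` is a placeholder for this module:
#
#   monkeypatch.setattr(verify_module, 'download_cot', die_async)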