def test_load_json_or_yaml(string, is_path, exception, raises, result):
    """Check that ``utils.load_json_or_yaml`` raises or parses as expected.

    When ``raises`` is set, the call must raise ``exception``; otherwise the
    same payload must parse to ``result`` for both supported file types.
    """
    if not raises:
        # The same content should round-trip identically as json and as yaml.
        for file_type in ("json", "yaml"):
            parsed = utils.load_json_or_yaml(
                string, is_path=is_path, exception=exception, file_type=file_type
            )
            assert parsed == result
    else:
        with pytest.raises(exception):
            utils.load_json_or_yaml(string, is_path=is_path, exception=exception)
def test_load_json_or_yaml(string, is_path, exception, raises, result):
    """Verify ``utils.load_json_or_yaml``: it either raises or returns ``result``."""
    if raises:
        with pytest.raises(exception):
            utils.load_json_or_yaml(string, is_path=is_path, exception=exception)
        return
    # Non-raising case: both parsers must agree with the expected result.
    for file_type in ("json", "yaml"):
        assert utils.load_json_or_yaml(
            string, is_path=is_path, exception=exception, file_type=file_type
        ) == result
def validate_task_schema(context, schema_key='schema_file'):
    """Validate the task definition.

    Args:
        context (scriptworker.context.Context): the scriptworker context. It must contain
            a task and the config pointing to the schema file
        schema_key: the key in `context.config` where the path to the schema file is. Key
            can contain dots (e.g.: 'schema_files.file_a'), in which case it is resolved
            as a nested lookup — each dot-separated part indexes one level deeper into
            ``context.config``.

    Raises:
        TaskVerificationError: if the task doesn't match the schema

    """
    # Walk the (possibly nested) config to find the schema file path.
    schema_path = context.config
    schema_keys = schema_key.split('.')
    for key in schema_keys:
        schema_path = schema_path[key]
    task_schema = load_json_or_yaml(schema_path, is_path=True)
    log.debug('Task is validated against this schema: {}'.format(task_schema))
    try:
        validate_json_schema(context.task, task_schema)
    except ScriptWorkerTaskException as e:
        # Re-raise as a task verification failure, chaining the original cause.
        raise TaskVerificationError(
            'Cannot validate task against schema. Task: {}.'.format(
                context.task)) from e
def generate_cot(context, parent_path=None):
    """Format and sign the cot body, and write to disk.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        parent_path (str, optional): The directory to write the chain of trust
            artifacts to.  If None, this is ``artifact_dir/public/``.
            Defaults to None.

    Returns:
        str: the contents of the chain of trust artifact.

    Raises:
        ScriptWorkerException: on schema error.

    """
    body = generate_cot_body(context)
    cot_schema_path = context.config['cot_schema_path']
    schema = load_json_or_yaml(
        cot_schema_path,
        is_path=True,
        exception=ScriptWorkerException,
        message="Can't read schema file {}: %(exc)s".format(cot_schema_path),
    )
    validate_json_schema(body, schema, name="chain of trust")
    body = format_json(body)
    if not parent_path:
        parent_path = os.path.join(context.config['artifact_dir'], 'public')
    unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
    write_to_file(unsigned_path, body)
    if context.config['sign_chain_of_trust']:
        # Detached ed25519 signature lives next to the unsigned artifact.
        signature_path = '{}.sig'.format(unsigned_path)
        private_key = ed25519_private_key_from_file(
            context.config['ed25519_private_key_path']
        )
        signature = private_key.sign(body.encode('utf-8'))
        write_to_file(signature_path, signature, file_type='binary')
    return body
def read_worker_creds(key="credentials"):
    """Get credentials from CREDS_FILES or the environment.

    This looks at the CREDS_FILES in order, and falls back to the environment.

    Args:
        key (str, optional): each CREDS_FILE is a json dict.  This key's value
            contains the credentials.  Defaults to 'credentials'.

    Returns:
        dict: the credentials found. None if no credentials found.

    """
    for path in CREDS_FILES:
        if not os.path.exists(path):
            continue
        contents = load_json_or_yaml(path, is_path=True, exception=None)
        # With ``exception=None``, load_json_or_yaml returns None on a
        # parse/read failure instead of raising, so guard before .get() —
        # otherwise an existing-but-unparseable file crashes with
        # AttributeError.
        if contents and contents.get(key):
            return contents[key]
    else:
        # for/else: no break occurs above, so this runs whenever no creds
        # file provided the key — fall back to the environment.
        if key == "credentials" and os.environ.get("TASKCLUSTER_ACCESS_TOKEN") and \
                os.environ.get("TASKCLUSTER_CLIENT_ID"):
            credentials = {
                "accessToken": os.environ["TASKCLUSTER_ACCESS_TOKEN"],
                "clientId": os.environ["TASKCLUSTER_CLIENT_ID"],
            }
            if os.environ.get("TASKCLUSTER_CERTIFICATE"):
                credentials['certificate'] = os.environ['TASKCLUSTER_CERTIFICATE']
            return credentials
def validate_task_schema(context, schema_key='schema_file'):
    """Validate the task definition.

    Args:
        context (scriptworker.context.Context): the scriptworker context. It must contain
            a task and the config pointing to the schema file
        schema_key: the key in `context.config` where the path to the schema file is. Key
            can contain dots (e.g.: 'schema_files.file_a'), in which case

    Raises:
        TaskVerificationError: if the task doesn't match the schema

    """
    # Resolve a possibly-nested config key ('a.b' -> config['a']['b']).
    schema_path = context.config
    for part in schema_key.split('.'):
        schema_path = schema_path[part]
    task_schema = load_json_or_yaml(schema_path, is_path=True)
    log.debug('Task is validated against this schema: {}'.format(task_schema))
    try:
        validate_json_schema(context.task, task_schema)
    except ScriptWorkerTaskException as e:
        raise TaskVerificationError('Cannot validate task against schema. Task: {}.'.format(context.task)) from e
def read_worker_creds(key="credentials"):
    """Get credentials from CREDS_FILES or the environment.

    This looks at the CREDS_FILES in order, and falls back to the environment.

    Args:
        key (str, optional): each CREDS_FILE is a json dict.  This key's value
            contains the credentials.  Defaults to 'credentials'.

    Returns:
        dict: the credentials found. None if no credentials found.

    """
    for path in CREDS_FILES:
        if not os.path.exists(path):
            continue
        contents = load_json_or_yaml(path, is_path=True, exception=None)
        # Bugfix: ``exception=None`` means a broken creds file yields None
        # rather than raising; calling .get() on None would raise
        # AttributeError, so skip unparseable files instead.
        if contents and contents.get(key):
            return contents[key]
    else:
        # for/else: no break in the loop, so this env fallback runs whenever
        # no file returned credentials above.
        if key == "credentials" and os.environ.get("TASKCLUSTER_ACCESS_TOKEN") and \
                os.environ.get("TASKCLUSTER_CLIENT_ID"):
            credentials = {
                "accessToken": os.environ["TASKCLUSTER_ACCESS_TOKEN"],
                "clientId": os.environ["TASKCLUSTER_CLIENT_ID"],
            }
            if os.environ.get("TASKCLUSTER_CERTIFICATE"):
                credentials['certificate'] = os.environ['TASKCLUSTER_CERTIFICATE']
            return credentials
def get_task(config):
    """Read the task.json from work_dir.

    Args:
        config (dict): the running config, to find work_dir.

    Returns:
        dict: the contents of task.json

    Raises:
        ScriptWorkerTaskException: on error.

    """
    task_path = os.path.join(config['work_dir'], "task.json")
    error_message = "Can't read task from {}!\n%(exc)s".format(task_path)
    return load_json_or_yaml(task_path, is_path=True, message=error_message)
def _init_context(config_path=None, default_config=None):
    """Build a Context from a config file, layered over ``default_config``."""
    context = Context()
    # This prevents *script from pasting the whole context.
    context.write_json = noop_sync
    if config_path is None:
        # Script mode: the single CLI argument is the config path.
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    if default_config is None:
        context.config = {}
    else:
        # Intentionally reuse (and mutate) the caller-supplied dict.
        context.config = default_config
    context.config.update(load_json_or_yaml(config_path, is_path=True))
    context.task = get_task(context.config)
    return context
def get_task(config):
    """Read the task.json from work_dir.

    Args:
        config (dict): the running config, to find work_dir.

    Returns:
        dict: the contents of task.json

    Raises:
        ScriptWorkerTaskException: on error.

    """
    path = os.path.join(config['work_dir'], "task.json")
    return load_json_or_yaml(
        path,
        is_path=True,
        message="Can't read task from {}!\n%(exc)s".format(path),
    )
def _init_context(config_path=None, default_config=None):
    """Create a Context, load its config from ``config_path``, attach the task."""
    context = Context()
    # This prevents *script from overwriting json on disk.
    context.write_json = lambda *args: None
    # Invoke the no-op once so coverage sees the lambda executed.
    context.write_json()
    if config_path is None:
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    # Reuse the caller's dict when given (it is mutated by update()).
    context.config = {} if default_config is None else default_config
    context.config.update(load_json_or_yaml(config_path, is_path=True))
    context.task = get_task(context.config)
    return context
def _init_context(config_path=None, default_config=None):
    """Initialize and return a Context for script execution."""
    context = Context()
    # This prevents *script from overwriting json on disk.
    context.write_json = lambda *args: None
    context.write_json()  # executed once for coverage
    if config_path is None:
        # Fall back to the CLI: exactly one argument (the config path).
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    if default_config is None:
        context.config = {}
    else:
        # Keep the caller's dict object; update() mutates it in place.
        context.config = default_config
    file_config = load_json_or_yaml(config_path, is_path=True)
    context.config.update(file_config)
    context.task = get_task(context.config)
    return context
def _init_context(config_path: Optional[str] = None, default_config: Optional[Dict[str, Any]] = None) -> Any:
    """Build a Context: load config from ``config_path`` and attach the task."""
    context = Context()  # type: Any
    # This prevents *script from overwriting json on disk.
    context.write_json = lambda *args: None
    context.write_json()  # type: ignore
    if config_path is None:
        # Script mode: a single CLI argument names the config file.
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    if default_config is None:
        context.config = {}
    else:
        # Deliberately adopt (and mutate) the caller-provided dict.
        context.config = default_config
    context.config.update(load_json_or_yaml(config_path, is_path=True))
    context.task = get_task(context.config)
    return context
def generate_cot(context, parent_path=None):
    """Format and sign the cot body, and write to disk.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        parent_path (str, optional): The directory to write the chain of trust
            artifacts to.  If None, this is ``artifact_dir/public/``.
            Defaults to None.

    Returns:
        str: the contents of the chain of trust artifact.

    Raises:
        ScriptWorkerException: on schema error.

    """
    body = generate_cot_body(context)
    cot_schema_path = context.config['cot_schema_path']
    schema = load_json_or_yaml(
        cot_schema_path,
        is_path=True,
        exception=ScriptWorkerException,
        message="Can't read schema file {}: %(exc)s".format(cot_schema_path),
    )
    validate_json_schema(body, schema, name="chain of trust")
    body = format_json(body)
    if not parent_path:
        parent_path = os.path.join(context.config['artifact_dir'], 'public')
    asc_path = os.path.join(parent_path, "chainOfTrust.json.asc")
    unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
    write_to_file(unsigned_path, body)
    if context.config['sign_chain_of_trust']:
        # Detached ed25519 signature next to the unsigned artifact.
        signature_path = '{}.sig'.format(unsigned_path)
        private_key = ed25519_private_key_from_file(
            context.config['ed25519_private_key_path'])
        write_to_file(
            signature_path,
            private_key.sign(body.encode('utf-8')),
            file_type='binary',
        )
    # GPG-sign the body and write the clearsigned artifact; the signed text
    # is also what gets returned to the caller.
    body = sign(GPG(context), body)
    write_to_file(asc_path, body)
    return body