def get_manifest():
    rw_manifest = yaml.load_yaml(MANIFEST_PATH)
    graph_config = load_graph_config(ROOT)
    validate_schema(base_schema, deepcopy(rw_manifest), "Invalid manifest:")
    check_manifest(deepcopy(rw_manifest), graph_config)
    # TODO make read-only recursively
    return ReadOnlyDict(rw_manifest)
def loader(kind, path, config, params, loaded_tasks): """ Loads selected jobs from a different taskgraph hierarchy. This loads jobs of the given kind from the taskgraph rooted at `base-path`, and includes all the jobs with names or aliaes matching the names in the `jobs` key. """ base_path = config.pop('base-path') sub_path = os.path.join(base_path, kind) logger.debug("Reference loader: load tasks from {}".format(sub_path)) sub_config = load_yaml(sub_path, 'kind.yml') _loader = _get_loader(sub_path, sub_config) inputs = _loader(kind, sub_path, sub_config, params, loaded_tasks) jobs = config.pop('jobs', None) config.update(sub_config) if jobs is not None: jobs = set(jobs) return (job for job in inputs if (_get_aliases(kind, job) & jobs)) else: return inputs
def generate_update_line(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    release_config = get_release_config(config)
    for job in jobs:
        config_file = job.pop('whats-new-config')
        update_config = load_yaml(config_file)

        product = job['shipping-product']
        # XXX On ESR68, even though they get filtered, we're building balrog tasks with
        # the new Fennec numbers. This solution is not ideal, but works at the moment.
        version = FennecVersion.parse(release_config['appVersion'])
        if product == 'devedition':
            product = 'firefox'
        job['worker']['update-line'] = {}
        for blob_type, suffix in [('wnp', ''), ('no-wnp', '-No-WNP')]:
            context = {
                'release-type': config.params['release_type'],
                'product': product,
                'version': version,
                'blob-type': blob_type,
                'build-id': config.params['moz_build_date'],
            }
            job['worker']['update-line'][suffix] = generate_update_properties(
                context, update_config)

        yield job
def load_jobs(params, root):
    cron_yml = load_yaml(root, '.cron.yml')
    schema.validate(cron_yml)

    # resolve keyed_by fields in each job
    jobs = cron_yml['jobs']

    return {j['name']: j for j in jobs}
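# Illustrative sketch only: a .cron.yml of the general shape load_jobs expects,
# with a top-level `jobs` list whose entries each carry a unique `name` (the
# key used to build the returned dict). Everything beyond `jobs` and `name` is
# an assumption for illustration, not taken from the schema validated above.
#
#   jobs:
#       - name: nightly-example          # hypothetical job name
#         job:
#             type: decision-task
#         when:
#             - {hour: 10, minute: 0}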
def loader(kind, path, config, params, loaded_tasks): """ Generate tasks implementing Gecko tests. """ builds_by_platform = get_builds_by_platform(dep_kind="build", loaded_tasks=loaded_tasks) signed_builds_by_platform = get_builds_by_platform( dep_kind="build-signing", loaded_tasks=loaded_tasks) # get the test platforms for those build tasks test_platforms_cfg = load_yaml(path, "test-platforms.yml") test_platforms = get_test_platforms(test_platforms_cfg, builds_by_platform, signed_builds_by_platform) # expand the test sets for each of those platforms test_sets_cfg = load_yaml(path, "test-sets.yml") test_platforms = expand_tests(test_sets_cfg, test_platforms) # load the test descriptions tests = transform_loader(kind, path, config, params, loaded_tasks) test_descriptions = {t.pop("name"): t for t in tests} # generate all tests for all test platforms for test_platform_name, test_platform in test_platforms.items(): for test_name in test_platform["test-names"]: test = copy.deepcopy(test_descriptions[test_name]) test["build-platform"] = test_platform["build-platform"] test["test-platform"] = test_platform_name test["build-label"] = test_platform["build-label"] if test_platform.get("build-signing-label", None): test["build-signing-label"] = test_platform[ "build-signing-label"] test["build-attributes"] = test_platform["build-attributes"] test["test-name"] = test_name if test_platform.get("shippable"): test.setdefault("attributes", {})["shippable"] = True test["attributes"]["shipping_product"] = test_platform[ "shipping_product"] logger.debug("Generating tasks for test {} on platform {}".format( test_name, test["test-platform"])) yield test
def load_graph_config(root_dir):
    config_yml = os.path.join(root_dir, "config.yml")
    if not os.path.exists(config_yml):
        raise Exception(f"Couldn't find taskgraph configuration: {config_yml}")

    logger.debug(f"loading config from `{config_yml}`")
    config = load_yaml(config_yml)

    logger.debug("validating the graph config.")
    validate_graph_config(config)

    return GraphConfig(config=config, root_dir=root_dir)
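# Hedged usage sketch: the returned GraphConfig wraps the parsed config.yml
# together with the directory it was loaded from. A caller might look roughly
# like the lines below; the root directory value and the "trust-domain" key are
# assumptions for illustration only.
#
#   graph_config = load_graph_config("taskcluster/ci")
#   trust_domain = graph_config["trust-domain"]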
def load(cls, root_dir, graph_config, kind_name):
    path = os.path.join(root_dir, kind_name)
    kind_yml = os.path.join(path, "kind.yml")
    if not os.path.exists(kind_yml):
        raise KindNotFound(kind_yml)

    logger.debug(f"loading kind `{kind_name}` from `{path}`")
    config = load_yaml(kind_yml)

    return cls(kind_name, path, config, graph_config)
def test_write_artifact_yml(self):
    data = [{"some": "data"}]
    tmpdir = tempfile.mkdtemp()
    try:
        decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
        decision.write_artifact("artifact.yml", data)
        self.assertEqual(load_yaml(decision.ARTIFACTS_DIR, "artifact.yml"), data)
    finally:
        if os.path.exists(tmpdir):
            shutil.rmtree(tmpdir)
        decision.ARTIFACTS_DIR = "artifacts"
def test_load(self): with MockedOpen({ "/dir1/dir2/foo.yml": dedent("""\ prop: - val1 """) }): self.assertEqual(yaml.load_yaml("/dir1/dir2", "foo.yml"), {"prop": ["val1"]})
def get_manifest():
    manifest_paths = glob.glob(os.path.join(MANIFEST_DIR, "*.yml"))
    all_manifests = {}
    for path in manifest_paths:
        rw_manifest = yaml.load_yaml(path)
        manifest_name = os.path.basename(path).replace(".yml", "")
        rw_manifest["manifest_name"] = manifest_name
        validate_schema(base_schema, deepcopy(rw_manifest), "Invalid manifest:")
        check_manifest(deepcopy(rw_manifest))
        assert manifest_name not in all_manifests
        all_manifests[manifest_name] = rw_manifest
    return ReadOnlyDict(all_manifests)
def read_artifact(filename):
    path = os.path.join(ARTIFACTS_DIR, filename)
    if filename.endswith('.yml'):
        return load_yaml(path, filename)
    elif filename.endswith('.json'):
        with open(path, 'r') as f:
            return json.load(f)
    elif filename.endswith('.gz'):
        import gzip
        with gzip.open(path, 'rb') as f:
            return json.load(f)
    else:
        raise TypeError("Don't know how to read {}".format(filename))
def get_manifest():
    manifest_paths = glob.glob(os.path.join(MANIFEST_DIR, "*.yml"))
    all_manifests = {}
    graph_config = load_graph_config(ROOT)
    for path in manifest_paths:
        rw_manifest = yaml.load_yaml(path)
        manifest_name = os.path.basename(path).replace(".yml", "")
        rw_manifest["manifest_name"] = manifest_name
        validate_schema(base_schema, deepcopy(rw_manifest), "Invalid manifest:")
        check_manifest(deepcopy(rw_manifest), graph_config)
        rw_manifest["artifacts"] = tuple(rw_manifest["artifacts"])
        assert manifest_name not in all_manifests
        all_manifests[manifest_name] = ReadOnlyDict(rw_manifest)
    return ReadOnlyDict(all_manifests)
def read_artifact(filename): path = os.path.join(ARTIFACTS_DIR, filename) if filename.endswith(".yml"): return load_yaml(path, filename) elif filename.endswith(".json"): with open(path) as f: return json.load(f) elif filename.endswith(".gz"): import gzip with gzip.open(path, "rb") as f: return json.load(f) else: raise TypeError(f"Don't know how to read {filename}")
def test_key_order(self):
    with MockedOpen({
        "/dir1/dir2/foo.yml": dedent(
            """\
            job:
                foo: 1
                bar: 2
                xyz: 3
            """
        )
    }):
        self.assertEqual(
            list(yaml.load_yaml("/dir1/dir2", "foo.yml")["job"].keys()),
            ["foo", "bar", "xyz"],
        )
def make_decision_task(params, root, symbol, arguments=[]):
    """Generate a basic decision task, based on the root .taskcluster.yml"""
    taskcluster_yml = load_yaml(root, '.taskcluster.yml')

    push_info = find_hg_revision_push_info(
        params['repository_url'], params['head_rev'])

    # provide a similar JSON-e context to what mozilla-taskcluster provides:
    # https://docs.taskcluster.net/reference/integrations/mozilla-taskcluster/docs/taskcluster-yml
    # but with a different tasks_for and an extra `cron` section
    context = {
        'tasks_for': 'cron',
        'repository': {
            'url': params['repository_url'],
            'project': params['project'],
            'level': params['level'],
        },
        'push': {
            'revision': params['head_rev'],
            # remainder are fake values, but the decision task expects them anyway
            'pushlog_id': push_info['pushid'],
            'pushdate': push_info['pushdate'],
            'owner': 'cron',
        },
        'cron': {
            'task_id': os.environ.get('TASK_ID', '<cron task id>'),
            'job_name': params['job_name'],
            'job_symbol': symbol,
            # args are shell-quoted since they are given to `bash -c`
            'quoted_args': ' '.join(pipes.quote(a) for a in arguments),
        },
        'now': current_json_time(),
        'ownTaskId': slugid.nice(),
    }

    rendered = jsone.render(taskcluster_yml, context)
    if len(rendered['tasks']) != 1:
        raise Exception("Expected .taskcluster.yml to only produce one cron task")
    task = rendered['tasks'][0]

    task_id = task.pop('taskId')
    return (task_id, task)
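# Note (not part of the original excerpt): `pipes.quote` is the legacy Python 2
# era spelling; on Python 3 the same helper is available as `shlex.quote`, e.g.
#
#   import shlex
#   quoted_args = ' '.join(shlex.quote(a) for a in arguments)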
def reference_loader(kind, path, config, params, loaded_tasks): """ Loads selected jobs from a different taskgraph hierarchy. This loads jobs of the given kind from the taskgraph rooted at `base-path`, and includes all the jobs with names or aliaes matching the names in the `jobs` key. """ base_path = config.pop('base-path') sub_path = os.path.join(base_path, kind) sub_config = load_yaml(sub_path, "kind.yml") loader = _get_loader(sub_path, sub_config) inputs = loader(kind, sub_path, sub_config, params, loaded_tasks) jobs = set(config.pop('jobs')) config.update(sub_config) return (job for job in inputs if (_get_aliases(kind, job) & jobs))
def jobs():
    defaults = config.get("job-defaults")
    for name, job in config.get("jobs", {}).items():
        if defaults:
            job = merge(defaults, job)
        job["job-from"] = "kind.yml"
        yield name, job

    for filename in config.get("jobs-from", []):
        tasks = load_yaml(path, filename)

        file_defaults = tasks.pop("job-defaults", None)
        if defaults:
            file_defaults = merge(defaults, file_defaults or {})

        for name, job in tasks.items():
            if file_defaults:
                job = merge(file_defaults, job)
            job["job-from"] = filename
            yield name, job
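# A minimal sketch, assuming a hypothetical kind.yml and jobs file, of the
# inputs this generator merges: `job-defaults` from kind.yml is applied to the
# inline `jobs` and also folded into the `job-defaults` of every file listed
# under `jobs-from` before that file's jobs are yielded. All names and values
# below are illustrative assumptions.
#
#   # kind.yml (hypothetical)
#   job-defaults:
#       run-on-projects: ['mozilla-central']
#   jobs-from:
#       - extra-jobs.yml
#
#   # extra-jobs.yml (hypothetical)
#   job-defaults:
#       worker-type: b-linux
#   some-job:
#       description: example job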
def generate_update_line(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    release_config = get_release_config(config)
    for job in jobs:
        config_file = job.pop('whats-new-config')
        update_config = load_yaml(config_file)

        product = job['shipping-product']
        if product == 'devedition':
            product = 'firefox'
        job['worker']['update-line'] = {}
        for blob_type, suffix in [('wnp', ''), ('no-wnp', '-No-WNP')]:
            context = {
                'release-type': config.params['release_type'],
                'product': product,
                'version': GeckoVersion.parse(release_config['appVersion']),
                'blob-type': blob_type,
            }
            job['worker']['update-line'][suffix] = generate_update_properties(
                context, update_config)

        yield job
def reference_loader(kind, path, config, params, loaded_tasks): """ Loads selected jobs from a different taskgraph hierarchy. This loads jobs of the given kind from the taskgraph rooted at `base-path`, and includes all the jobs with names or aliaes matching the names in the `jobs` key. """ base_path = config.pop('base-path') sub_path = os.path.join(base_path, kind) sub_config = load_yaml(sub_path, "kind.yml") loader = _get_loader(sub_path, sub_config) inputs = loader(kind, sub_path, sub_config, params, loaded_tasks) jobs = config.pop('jobs', None) config.update(sub_config) if jobs is not None: jobs = set(jobs) return (job for job in inputs if (_get_aliases(kind, job) & jobs)) else: return inputs
def generate_update_line(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    release_config = get_release_config(config)
    for job in jobs:
        config_file = job.pop("whats-new-config")
        update_config = load_yaml(config_file)

        product = job["shipping-product"]
        if product == "devedition":
            product = "firefox"
        job["worker"]["update-line"] = {}
        for blob_type, suffix in [("wnp", ""), ("no-wnp", "-No-WNP")]:
            context = {
                "release-type": config.params["release_type"],
                "product": product,
                "version": GeckoVersion.parse(release_config["appVersion"]),
                "blob-type": blob_type,
                "build-id": config.params["moz_build_date"],
            }
            job["worker"]["update-line"][suffix] = generate_update_properties(
                context, update_config
            )

        yield job
def register(self, jobs_config_path, image_dir):
    """Register additional image_paths. In this case, there is no 'jobs' key
    in the loaded YAML as this file is loaded via jobs-from in kind.yml."""
    jobs = load_yaml(GECKO, jobs_config_path)
    self.__update_image_paths(jobs, image_dir)
def __init__(self, config_path, image_dir=IMAGE_DIR):
    config = load_yaml(GECKO, config_path)
    self.__update_image_paths(config["jobs"], image_dir)
class TestTaskclusterYml(unittest.TestCase):
    taskcluster_yml = load_yaml(GECKO, ".taskcluster.yml")

    def test_push(self):
        context = {
            "tasks_for": "hg-push",
            "push": {
                "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
                "owner": "*****@*****.**",
                "pushlog_id": 1556565286,
                "pushdate": 112957,
            },
            "repository": {
                "url": "https://hg.mozilla.org/mozilla-central",
                "project": "mozilla-central",
                "level": "3",
            },
            "ownTaskId": slugid.nice().encode("ascii"),
        }
        rendered = jsone.render(self.taskcluster_yml, context)
        pprint.pprint(rendered)
        self.assertEqual(rendered["tasks"][0]["metadata"]["name"],
                         "Gecko Decision Task")

    def test_cron(self):
        context = {
            "tasks_for": "cron",
            "repository": {
                "url": "https://hg.mozilla.org/mozilla-central",
                "project": "mozilla-central",
                "level": 3,
            },
            "push": {
                "revision": "e8aebe488b2f2e567940577de25013d00e818f7c",
                "pushlog_id": -1,
                "pushdate": 0,
                "owner": "cron",
            },
            "cron": {
                "task_id": "<cron task id>",
                "job_name": "test",
                "job_symbol": "T",
                "quoted_args": "abc def",
            },
            "now": current_json_time(),
            "ownTaskId": slugid.nice().encode("ascii"),
        }
        rendered = jsone.render(self.taskcluster_yml, context)
        pprint.pprint(rendered)
        self.assertEqual(rendered["tasks"][0]["metadata"]["name"],
                         "Decision Task for cron job test")

    def test_action(self):
        context = {
            "tasks_for": "action",
            "repository": {
                "url": "https://hg.mozilla.org/mozilla-central",
                "project": "mozilla-central",
                "level": 3,
            },
            "push": {
                "revision": "e8d2d9aff5026ef1f1777b781b47fdcbdb9d8f20",
                "owner": "*****@*****.**",
                "pushlog_id": 1556565286,
                "pushdate": 112957,
            },
            "action": {
                "name": "test-action",
                "title": "Test Action",
                "description": "Just testing",
                "taskGroupId": slugid.nice().encode("ascii"),
                "symbol": "t",
                "repo_scope": "assume:repo:hg.mozilla.org/try:action:generic",
                "cb_name": "test_action",
            },
            "input": {},
            "parameters": {},
            "now": current_json_time(),
            "taskId": slugid.nice().encode("ascii"),
            "ownTaskId": slugid.nice().encode("ascii"),
            "clientId": "testing/testing/testing",
        }
        rendered = jsone.render(self.taskcluster_yml, context)
        pprint.pprint(rendered)
        self.assertEqual(rendered["tasks"][0]["metadata"]["name"],
                         "Action: Test Action")

    def test_unknown(self):
        context = {"tasks_for": "bitkeeper-push"}
        rendered = jsone.render(self.taskcluster_yml, context)
        pprint.pprint(rendered)
        self.assertEqual(rendered["tasks"], [])
def taskcluster_yml(self):
    return load_yaml(GECKO, ".taskcluster.yml")
def test_load(self):
    with MockedOpen({'/dir1/dir2/foo.yml': FOO_YML}):
        self.assertEqual(yaml.load_yaml("/dir1/dir2", "foo.yml"),
                         {'prop': ['val1']})
def read_taskcluster_yml(filename): """Load and parse .taskcluster.yml, memoized to save some time""" return yaml.load_yaml(filename)
def read_taskcluster_yml(filename):
    '''Load and parse .taskcluster.yml, memoized to save some time'''
    return yaml.load_yaml(*os.path.split(filename))
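# Note (assumption, not shown in this excerpt): the "memoized" behaviour the
# docstring mentions would come from a caching decorator applied to the
# function, along the lines of:
#
#   from functools import lru_cache
#
#   @lru_cache(maxsize=None)
#   def read_taskcluster_yml(filename):
#       ...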
def test_load(self): with MockedOpen({"/dir1/dir2/foo.yml": FOO_YML}): self.assertEqual(yaml.load_yaml("/dir1/dir2", "foo.yml"), {"prop": ["val1"]})