import six

from taskgraph.util.templates import merge


def fake_loader(kind, path, config, parameters, loaded_tasks):
    for i in range(3):
        # Every task after the first depends on the previous one.
        dependencies = {}
        if i >= 1:
            dependencies["prev"] = "{}-t-{}".format(kind, i - 1)

        task = {
            "kind": kind,
            "label": "{}-t-{}".format(kind, i),
            "description": "{} task {}".format(kind, i),
            "attributes": {"_tasknum": six.text_type(i)},
            "task": {
                "i": i,
                "metadata": {"name": "t-{}".format(i)},
                "deadline": "soon",
            },
            "dependencies": dependencies,
        }
        # Let explicit task keys win over the kind's job-defaults.
        if "job-defaults" in config:
            task = merge(config["job-defaults"], task)
        yield task
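# Usage sketch for the fake loader above (hypothetical config; assumes
# taskgraph's merge(), in which nested dicts are merged recursively and keys
# from later arguments win):
config = {"job-defaults": {"task": {"owner": "nobody@example.com"}}}
for task in fake_loader("test", ".", config, {}, []):
    # "owner" comes from job-defaults; explicit keys such as "deadline" are
    # kept, because the task dict is the later merge argument.
    print(task["label"], task["task"])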
import logging

logger = logging.getLogger(__name__)

# base_loader, get_components, get_files_changed_pr/_push,
# get_affected_components, ALL_COMPONENTS and _GIT_ZERO_HASHES are
# module-level helpers defined elsewhere.


def loader(kind, path, config, params, loaded_tasks):
    # Build everything unless we have an optimization strategy (defined below).
    files_changed = []
    affected_components = ALL_COMPONENTS

    if params["tasks_for"] == "github-pull-request":
        logger.info("Processing pull request %s" % params["pull_request_number"])
        files_changed = get_files_changed_pr(params["base_repository"],
                                             params["pull_request_number"])
        affected_components = get_affected_components(
            files_changed, config.get("files-affecting-components"))
    elif params["tasks_for"] == "github-push":
        if params["base_rev"] in _GIT_ZERO_HASHES:
            logger.warning(
                "base_rev is a zero hash, meaning there is no previous push. "
                "Building every component...")
        else:
            logger.info("Processing push for commit range %s -> %s"
                        % (params["base_rev"], params["head_rev"]))
            files_changed = get_files_changed_push(params["base_repository"],
                                                   params["base_rev"],
                                                   params["head_rev"])
            affected_components = get_affected_components(
                files_changed, config.get("files-affecting-components"))

    logger.info("Files changed: %s" % " ".join(files_changed))
    if affected_components is ALL_COMPONENTS:
        logger.info("Affected components: ALL")
    else:
        logger.info("Affected components: %s" % " ".join(affected_components))

    not_for_components = config.get("not-for-components", [])
    jobs = {
        '{}{}'.format('' if build_type == 'regular' else build_type + '-',
                      component['name']): {
            'attributes': {
                'build-type': build_type,
                'component': component['name'],
            }
        }
        for component in get_components()
        for build_type in ('regular', 'nightly', 'release')
        if ((affected_components is ALL_COMPONENTS
             or component['name'] in affected_components)
            and component['name'] not in not_for_components
            and (component['shouldPublish'] or build_type == 'regular'))
    }

    # Filter away overridden jobs that we wouldn't build anyway, to avoid
    # ending up with partial job entries.
    overridden_jobs = {
        k: v for k, v in config.pop('overriden-jobs', {}).items()
        if affected_components is ALL_COMPONENTS or k in jobs
    }
    jobs = merge(jobs, overridden_jobs)

    config['jobs'] = jobs
    return base_loader(kind, path, config, params, loaded_tasks)
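# The loaders in this file reference _GIT_ZERO_HASHES without defining it.
# A plausible sketch, based on git reporting an all-zero object id as the
# base revision of a branch's first push (40 hex digits for SHA-1
# repositories, 64 for SHA-256); the constant in the real module may differ:
_GIT_ZERO_HASHES = ("0" * 40, "0" * 64)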
import copy

from taskgraph.util.treeherder import join_symbol, split_symbol


def split_variants(config, tests):
    # TEST_VARIANTS maps a variant name to its spec; a sketch of one entry
    # follows this function.
    for test in tests:
        variants = test.pop('variants')

        # Always yield the unmodified test first.
        yield copy.deepcopy(test)

        for name in variants:
            testv = copy.deepcopy(test)
            variant = TEST_VARIANTS[name]

            # Skip variants whose filter rejects this test.
            if 'filterfn' in variant and not variant['filterfn'](testv):
                continue

            testv['attributes']['unittest_variant'] = name
            testv['description'] = variant['description'].format(**testv)

            # Suffix the test names and the Treeherder symbol (preferring the
            # group part, if the symbol has one).
            suffix = '-' + variant['suffix']
            testv['test-name'] += suffix
            testv['try-name'] += suffix

            group, symbol = split_symbol(testv['treeherder-symbol'])
            if group != '?':
                group += suffix
            else:
                symbol += suffix
            testv['treeherder-symbol'] = join_symbol(group, symbol)

            testv.update(variant.get('replace', {}))
            yield merge(testv, variant.get('merge', {}))
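# A hypothetical TEST_VARIANTS entry, limited to the keys split_variants
# actually reads ('description' and 'suffix', plus the optional 'filterfn',
# 'replace' and 'merge'); real entries and their values live elsewhere:
TEST_VARIANTS = {
    'fission': {
        'description': "{description} with fission enabled",
        'suffix': 'fis',
        'filterfn': lambda test: test['attributes'].get('e10s', True),
        'replace': {'max-run-time': 3600},
        'merge': {'mozharness': {'extra-options': ['--variant-flag']}},
    },
}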
def test_merge(self):
    first = {'a': 1, 'b': 2, 'd': 11}
    second = {'b': 20, 'c': 30}
    third = {'c': 300, 'd': 400}
    expected = {
        'a': 1,
        'b': 20,
        'c': 300,
        'd': 400,
    }
    self.assertEqual(merge(first, second, third), expected)

    # The inputs haven't changed.
    self.assertEqual(first, {'a': 1, 'b': 2, 'd': 11})
    self.assertEqual(second, {'b': 20, 'c': 30})
    self.assertEqual(third, {'c': 300, 'd': 400})
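# A minimal sketch of the merge() behavior the test above asserts: keys from
# later mappings win, and the inputs are left untouched. Recursive merging of
# nested dicts is assumed from taskgraph's merge helper rather than asserted
# by the test; the stand-in is named merge_sketch so it doesn't shadow the
# import above.
import copy


def merge_sketch(*dicts):
    result = {}
    for d in dicts:
        for key, value in d.items():
            if isinstance(value, dict) and isinstance(result.get(key), dict):
                # Merge nested dicts key by key instead of replacing them.
                result[key] = merge_sketch(result[key], value)
            else:
                # Deep-copy so later mutation of the result can't touch inputs.
                result[key] = copy.deepcopy(value)
    return result


assert merge_sketch({'a': 1, 'd': 11}, {'d': 400}) == {'a': 1, 'd': 400}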
# A leaner fake_loader variant: no description, metadata or deadline on the
# generated tasks.
def fake_loader(kind, path, config, parameters, loaded_tasks):
    for i in range(3):
        dependencies = {}
        if i >= 1:
            dependencies['prev'] = '{}-t-{}'.format(kind, i - 1)

        task = {
            'kind': kind,
            'label': '{}-t-{}'.format(kind, i),
            'attributes': {'_tasknum': str(i)},
            'task': {'i': i},
            'dependencies': dependencies,
        }
        if 'job-defaults' in config:
            task = merge(config['job-defaults'], task)
        yield task
# A fake_loader variant that also sets a description and task metadata.
def fake_loader(kind, path, config, parameters, loaded_tasks):
    for i in range(3):
        dependencies = {}
        if i >= 1:
            dependencies['prev'] = '{}-t-{}'.format(kind, i - 1)

        task = {
            'kind': kind,
            'label': '{}-t-{}'.format(kind, i),
            'description': '{} task {}'.format(kind, i),
            'attributes': {'_tasknum': six.text_type(i)},
            'task': {'i': i, 'metadata': {'name': 't-{}'.format(i)}},
            'dependencies': dependencies,
        }
        if 'job-defaults' in config:
            task = merge(config['job-defaults'], task)
        yield task
from pathlib import Path


def services_and_libraries_loader(kind, path, config, parameters, loaded_tasks):
    # One job per package directory in the configured workspace.
    for package in [d for d in Path(config["workspace"]).iterdir() if d.is_dir()]:
        job = merge(
            config.get("job-defaults", {}),
            {
                "name": package.name,
                "description": "package tests for {}".format(package.name),
                "run": {
                    "command": (
                        "./db/test-setup.sh && "
                        "yarn workspace taskcluster-{}{} coverage:report"
                    ).format(config.get("prefix", ""), package.name),
                },
            },
            # Per-package overrides win over both defaults and the generated job.
            config.get("job-overrides", {}).get(package.name, {}),
        )
        logger.debug("Generating tasks for {} {}".format(kind, package.name))
        yield job
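# Sketch of the generated portion of a job yielded by the loader above, for a
# hypothetical workspace containing a "queue" package and a config prefix of
# "lib-" (names are illustrative; job-defaults and per-package overrides get
# merged around this dict):
#
#   {
#       "name": "queue",
#       "description": "package tests for queue",
#       "run": {"command": "./db/test-setup.sh && "
#                          "yarn workspace taskcluster-lib-queue coverage:report"},
#   }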
def loader(kind, path, config, params, loaded_tasks):
    not_for_components = config.get("not-for-components", [])
    jobs = {
        '{}{}'.format('' if build_type == 'regular' else build_type + '-',
                      component['name']): {
            'attributes': {
                'build-type': build_type,
                'component': component['name'],
            }
        }
        for component in get_components()
        for build_type in ('regular', 'nightly', 'release', 'snapshot')
        if (component['name'] not in not_for_components
            and (component['shouldPublish'] or build_type == 'regular'))
    }

    jobs = merge(jobs, config.pop('overriden-jobs', {}))

    config['jobs'] = jobs
    return base_loader(kind, path, config, params, loaded_tasks)
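# The label scheme above: 'regular' builds keep the bare component name while
# other build types gain a prefix. For a publishable component named
# "browser-state" (an illustrative name), the comprehension yields the labels
#
#   "browser-state", "nightly-browser-state", "release-browser-state",
#   "snapshot-browser-state"
#
# whereas a component with shouldPublish == False only gets its 'regular' job.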
from collections import defaultdict


def loader(kind, path, config, params, loaded_tasks):
    # Build everything unless we have an optimization strategy (defined below).
    files_changed = []
    affected_components = ALL_COMPONENTS

    # Map each component to its direct upstream dependencies, and invert the
    # relation so we can also look up a component's downstream dependents.
    upstream_component_dependencies = defaultdict(set)
    downstream_component_dependencies = defaultdict(set)
    for component, deps in get_upstream_deps_for_components(
            [c["name"] for c in get_components()]):
        if deps:
            logger.info(
                f"Found direct upstream dependencies for component '{component}': {deps}")
        else:
            logger.info(
                "No direct upstream dependencies found for component '%s'" % component)
        upstream_component_dependencies[component] = deps
        for d in deps:
            downstream_component_dependencies[d].add(component)

    if params["tasks_for"] == "github-pull-request":
        logger.info("Processing pull request %s" % params["pull_request_number"])
        files_changed = get_files_changed_pr(params["base_repository"],
                                             params["pull_request_number"])
        affected_components = get_affected_components(
            files_changed, config.get("files-affecting-components"),
            upstream_component_dependencies, downstream_component_dependencies)
    elif params["tasks_for"] == "github-push":
        if params["base_rev"] in _GIT_ZERO_HASHES:
            logger.warning(
                "base_rev is a zero hash, meaning there is no previous push. "
                "Building every component...")
        elif params["head_ref"] == "refs/heads/main":
            # Disable the affected_components optimization to make sure we
            # execute all tests to get a complete code coverage report for
            # pushes to 'main'. See
            # https://github.com/mozilla-mobile/android-components/issues/9382#issuecomment-760506327
            logger.info("head_ref is refs/heads/main. Building every component...")
        else:
            logger.info("Processing push for commit range {} -> {}".format(
                params["base_rev"], params["head_rev"]))
            files_changed = get_files_changed_push(params["base_repository"],
                                                   params["base_rev"],
                                                   params["head_rev"])
            affected_components = get_affected_components(
                files_changed, config.get("files-affecting-components"),
                upstream_component_dependencies, downstream_component_dependencies)

    logger.info("Files changed: %s" % " ".join(files_changed))
    if affected_components is ALL_COMPONENTS:
        logger.info("Affected components: ALL")
    else:
        logger.info("Affected components: %s" % " ".join(sorted(affected_components)))

    not_for_components = config.get("not-for-components", [])
    jobs = {
        '{}{}'.format('' if build_type == 'regular' else build_type + '-',
                      component['name']): {
            'attributes': {
                'build-type': build_type,
                'component': component['name'],
            }
        }
        for component in get_components()
        for build_type in ('regular', 'nightly', 'release')
        if ((affected_components is ALL_COMPONENTS
             or component['name'] in affected_components)
            and component['name'] not in not_for_components
            and (component['shouldPublish'] or build_type == 'regular'))
    }

    # Filter away overridden jobs that we wouldn't build anyway, to avoid
    # ending up with partial job entries.
    overridden_jobs = {
        k: v for k, v in config.pop('overriden-jobs', {}).items()
        if affected_components is ALL_COMPONENTS or k in jobs
    }
    jobs = merge(jobs, overridden_jobs)

    config['jobs'] = jobs
    return base_loader(kind, path, config, params, loaded_tasks)
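# Worked example of the dependency maps built above: if
# get_upstream_deps_for_components reports that 'feature-tabs' (an
# illustrative name) directly depends on 'browser-state', then after the loop
#
#   upstream_component_dependencies['feature-tabs'] == {'browser-state'}
#   downstream_component_dependencies['browser-state'] == {'feature-tabs'}
#
# so get_affected_components can mark 'feature-tabs' as affected when a push
# only touches 'browser-state'.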