def __call__(self, name):
    '''
    So this object can easily be passed to mustache we allow it to be
    called directly...

    Returns the slugid cached for ``name``, generating (and caching) a
    fresh one on first use so repeated lookups of the same name always
    yield the same id.
    '''
    # Single membership test instead of look-up-then-return; also drops
    # the stray trailing semicolon from the original.
    if name not in self._names:
        self._names[name] = slugid()
    return self._names[name]
def __call__(self, name):
    '''
    So this object can easily be passed to mustache we allow it to be
    called directly...

    Acts as a memoized slugid factory: the first call for a given name
    generates an id, later calls return the cached one.
    '''
    # EAFP: the common case is a cache hit, so try the lookup first.
    try:
        return self._names[name]
    except KeyError:
        fresh = slugid()
        self._names[name] = fresh
        return fresh
def create_graph(self, **params):
    '''
    Expand the job templates selected by the commit message into a
    taskcluster task graph and print it as JSON on stdout.

    The graph is a dict with 'tasks' and 'scopes' keys plus 'metadata';
    when params['ci'] is True the 'scopes' and 'metadata' keys are
    stripped (the graph is extending an existing one).

    Exits the process with status 1 when project == 'try' and no commit
    message was supplied.

    Fix: ``message.fomrat`` -> ``message.format`` — the typo would have
    raised AttributeError instead of the intended ValueError when a
    build's extra.treeherder.collection had more than one entry.
    '''
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.slugid import slugid
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'"
        )
        sys.exit(1)

    templates = Templates(ROOT)
    # Branch specific job flags, falling back to the default set when the
    # branch has none of its own.
    job_path = os.path.join(ROOT, 'tasks', 'branches', project,
                            'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)
    mozharness = load_mozharness_info()

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'index': 'index.garbage.staging.mshal-testing',  # TODO
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'mozharness_repository': mozharness['repo'],
        'mozharness_rev': mozharness['revision'],
        'mozharness_ref': mozharness.get('reference',
                                         mozharness['revision']),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(
        params['project'],
        params.get('revision_hash', '')
    )

    routes_file = os.path.join(ROOT, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {
        'tasks': [],
        'scopes': []
    }

    if params['revision_hash']:
        for env in TREEHERDER_ROUTES:
            graph['scopes'].append('queue:route:{}.{}'.format(
                TREEHERDER_ROUTES[env], treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if params['revision_hash']:
            decorate_task_treeherder_routes(build_task['task'],
                                            treeherder_route)
            decorate_task_json_routes(build,
                                      build_task['task'],
                                      json_routes,
                                      build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        test_packages_url, tests_url = None, None

        if 'test_packages' in build_task['task']['extra']['locations']:
            test_packages_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['test_packages']
            )

        if 'tests' in build_task['task']['extra']['locations']:
            tests_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['tests']
            )

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build']
        )

        # img_url is only necessary for device builds
        img_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations'].get('img', '')
        )

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'].extend(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = \
                build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            # Fixed typo: was message.fomrat(...)
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            post_parameters = copy.copy(build_parameters)
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 slugid(),
                                                 templates,
                                                 build_treeherder_config)
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['img_url'] = img_url
            if tests_url:
                test_parameters['tests_url'] = tests_url
            if test_packages_url:
                test_parameters['test_packages_url'] = test_packages_url
            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra']['chunks']

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            test_parameters['total_chunks'] = chunk_config['total']

            for chunk in range(1, chunk_config['total'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = configure_dependent_task(test['task'],
                                                     test_parameters,
                                                     slugid(),
                                                     templates,
                                                     build_treeherder_config)

                if params['revision_hash']:
                    decorate_task_treeherder_routes(
                        test_task['task'], treeherder_route)

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(
                    test_task['task']['workerType']
                )

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    '''
    Expand the job templates selected by the commit message into a
    taskcluster task graph and print it as JSON on stdout.

    The result dict always has 'tasks' and 'scopes'; 'metadata' is added
    too, and both 'scopes' and 'metadata' are removed again when
    params['ci'] is True (the graph extends an existing one).

    Exits with status 1 when project == 'try' and no commit message was
    supplied.

    Fix: ``message.fomrat`` -> ``message.format`` — the typo would have
    raised AttributeError instead of the intended ValueError when a
    build's extra.treeherder.collection had more than one entry.
    '''
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.slugid import slugid
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'"
        )
        sys.exit(1)

    templates = Templates(ROOT)
    # Branch specific job flags, falling back to the default set.
    job_path = os.path.join(ROOT, 'tasks', 'branches', project,
                            'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)
    mozharness = load_mozharness_info()

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'index': 'index.garbage.staging.mshal-testing',  # TODO
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'mozharness_repository': mozharness['repo'],
        'mozharness_rev': mozharness['revision'],
        'mozharness_ref': mozharness.get('reference',
                                         mozharness['revision']),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(params['project'],
                                      params.get('revision_hash', ''))

    routes_file = os.path.join(ROOT, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {'tasks': [], 'scopes': []}

    if params['revision_hash']:
        for env in TREEHERDER_ROUTES:
            graph['scopes'].append('queue:route:{}.{}'.format(
                TREEHERDER_ROUTES[env], treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if params['revision_hash']:
            decorate_task_treeherder_routes(build_task['task'],
                                            treeherder_route)
            decorate_task_json_routes(build,
                                      build_task['task'],
                                      json_routes,
                                      build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        test_packages_url, tests_url = None, None

        if 'test_packages' in build_task['task']['extra']['locations']:
            test_packages_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['test_packages'])

        if 'tests' in build_task['task']['extra']['locations']:
            tests_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['tests'])

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build'])

        # img_url is only necessary for device builds
        img_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations'].get('img', ''))

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'].extend(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = \
                build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            # Fixed typo: was message.fomrat(...)
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            post_parameters = copy.copy(build_parameters)
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 slugid(),
                                                 templates,
                                                 build_treeherder_config)
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['img_url'] = img_url
            if tests_url:
                test_parameters['tests_url'] = tests_url
            if test_packages_url:
                test_parameters['test_packages_url'] = test_packages_url
            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra']['chunks']

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            test_parameters['total_chunks'] = chunk_config['total']

            for chunk in range(1, chunk_config['total'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = configure_dependent_task(
                    test['task'],
                    test_parameters,
                    slugid(),
                    templates,
                    build_treeherder_config)

                if params['revision_hash']:
                    decorate_task_treeherder_routes(
                        test_task['task'], treeherder_route)

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(
                    test_task['task']['workerType'])

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    '''
    Expand the job templates selected by the commit message into a
    taskcluster task graph and print it as JSON on stdout.

    Builds a dict with 'tasks', 'scopes', and 'metadata'; the latter two
    are stripped when params['ci'] is True because the graph extends an
    existing one.

    Exits with status 1 when project == 'try' and no commit message was
    supplied.

    Fixes: ``message.fomrat`` -> ``message.format`` (the typo would have
    raised AttributeError instead of the intended ValueError), and
    removal of two stray trailing semicolons.
    '''
    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'"
        )
        sys.exit(1)

    templates = Templates(ROOT)
    # Branch specific job flags, falling back to the default set.
    job_path = os.path.join(ROOT, 'tasks', 'branches', project,
                            'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': datetime.datetime.now().isoformat(),
        'mozharness_repository': params['mozharness_repository'],
        'mozharness_rev': params['mozharness_rev'],
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}.{}'.format(
        TREEHERDER_ROUTE_PREFIX,
        params['project'],
        params.get('revision_hash', '')
    )

    # Task graph we are generating for taskcluster...
    graph = {
        'tasks': [],
        'scopes': []
    }

    if params['revision_hash']:
        graph['scopes'].append('queue:route:{}'.format(treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if 'routes' not in build_task['task']:
            build_task['task']['routes'] = []

        if params['revision_hash']:
            build_task['task']['routes'].append(treeherder_route)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        tests_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['tests']
        )

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build']
        )

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = \
                build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            # Fixed typo: was message.fomrat(...)
            raise ValueError(message.format(build['task']))

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['tests_url'] = tests_url
            test_parameters['total_chunks'] = 1

            if 'chunks' in test:
                test_parameters['total_chunks'] = test['chunks']

            for chunk in range(1, test_parameters['total_chunks'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = templates.load(test['task'], test_parameters)
                test_task['taskId'] = slugid()

                if 'requires' not in test_task:
                    test_task['requires'] = []

                test_task['requires'].append(test_parameters['build_slugid'])

                if 'treeherder' not in test_task['task']['extra']:
                    test_task['task']['extra']['treeherder'] = {}

                # Copy over any treeherder configuration from the build so
                # tests show up under the same platform...
                test_treeherder_config = \
                    test_task['task']['extra']['treeherder']
                test_treeherder_config['collection'] = \
                    build_treeherder_config.get('collection', {})
                test_treeherder_config['build'] = \
                    build_treeherder_config.get('build', {})
                test_treeherder_config['machine'] = \
                    build_treeherder_config.get('machine', {})

                if 'routes' not in test_task['task']:
                    test_task['task']['routes'] = []

                if 'scopes' not in test_task['task']:
                    test_task['task']['scopes'] = []

                if params['revision_hash']:
                    test_task['task']['routes'].append(treeherder_route)
                    test_task['task']['scopes'].append(
                        'queue:route:{}'.format(treeherder_route))

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(
                    test_task['task']['workerType']
                )

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    '''
    Expand the job templates selected by the commit message into a
    taskcluster task graph and print it as JSON on stdout.

    Builds a dict with 'tasks', 'scopes', and 'metadata'; the latter two
    are stripped when params['ci'] is True because the graph extends an
    existing one.

    Exits with status 1 when project == 'try' and no commit message was
    supplied.

    Fix: ``message.fomrat`` -> ``message.format`` — the typo would have
    raised AttributeError instead of the intended ValueError when a
    build's extra.treeherder.collection had more than one entry.
    '''
    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'"
        )
        sys.exit(1)

    templates = Templates(ROOT)
    # Branch specific job flags, falling back to the default set.
    job_path = os.path.join(ROOT, 'tasks', 'branches', project,
                            'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': datetime.datetime.now().isoformat(),
        'mozharness_repository': params['mozharness_repository'],
        'mozharness_rev': params['mozharness_rev'],
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}.{}'.format(TREEHERDER_ROUTE_PREFIX,
                                         params['project'],
                                         params.get('revision_hash', ''))

    # Task graph we are generating for taskcluster...
    graph = {'tasks': [], 'scopes': []}

    if params['revision_hash']:
        graph['scopes'].append('queue:route:{}'.format(treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if 'routes' not in build_task['task']:
            build_task['task']['routes'] = []

        if params['revision_hash']:
            build_task['task']['routes'].append(treeherder_route)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        tests_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['tests'])

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build'])

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = \
                build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            # Fixed typo: was message.fomrat(...)
            raise ValueError(message.format(build['task']))

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['tests_url'] = tests_url
            test_parameters['total_chunks'] = 1

            if 'chunks' in test:
                test_parameters['total_chunks'] = test['chunks']

            for chunk in range(1, test_parameters['total_chunks'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = templates.load(test['task'], test_parameters)
                test_task['taskId'] = slugid()

                if 'requires' not in test_task:
                    test_task['requires'] = []

                test_task['requires'].append(
                    test_parameters['build_slugid'])

                if 'treeherder' not in test_task['task']['extra']:
                    test_task['task']['extra']['treeherder'] = {}

                # Copy over any treeherder configuration from the build so
                # tests show up under the same platform...
                test_treeherder_config = test_task['task']['extra'][
                    'treeherder']
                test_treeherder_config['collection'] = \
                    build_treeherder_config.get('collection', {})
                test_treeherder_config['build'] = \
                    build_treeherder_config.get('build', {})
                test_treeherder_config['machine'] = \
                    build_treeherder_config.get('machine', {})

                if 'routes' not in test_task['task']:
                    test_task['task']['routes'] = []

                if 'scopes' not in test_task['task']:
                    test_task['task']['scopes'] = []

                if params['revision_hash']:
                    test_task['task']['routes'].append(treeherder_route)
                    test_task['task']['scopes'].append(
                        'queue:route:{}'.format(treeherder_route))

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(
                    test_task['task']['workerType'])

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    """Expand the job templates selected by the commit message into a
    taskcluster task graph and print it as JSON on stdout.

    The result dict has "tasks" and "scopes" keys plus "metadata"; the
    latter two are stripped when params["ci"] is True because the graph
    extends an existing one.

    Exits with status 1 when project == "try" and no commit message was
    supplied.

    Fix: ``message.fomrat`` -> ``message.format`` — the typo would have
    raised AttributeError instead of the intended ValueError when a
    build's extra.treeherder.collection had more than one entry.
    """
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.slugid import slugid
    from taskcluster_graph.from_now import json_time_from_now, current_json_time
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    project = params["project"]
    message = params.get("message", "") if project == "try" else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == "try" and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'"
        )
        sys.exit(1)

    templates = Templates(ROOT)
    # Branch specific job flags, falling back to the default set.
    job_path = os.path.join(ROOT, "tasks", "branches", project, "job_flags.yml")
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)
    mozharness = load_mozharness_info()

    # Template parameters used when expanding the graph
    parameters = dict(
        gaia_info().items()
        + {
            "project": project,
            "pushlog_id": params.get("pushlog_id", 0),
            "docker_image": docker_image,
            "base_repository": params["base_repository"] or params["head_repository"],
            "head_repository": params["head_repository"],
            "head_ref": params["head_ref"] or params["head_rev"],
            "head_rev": params["head_rev"],
            "owner": params["owner"],
            "from_now": json_time_from_now,
            "now": current_json_time(),
            "mozharness_repository": mozharness["repo"],
            "mozharness_rev": mozharness["revision"],
            "mozharness_ref": mozharness.get("reference", mozharness["revision"]),
            "revision_hash": params["revision_hash"],
        }.items()
    )

    treeherder_route = "{}.{}".format(params["project"], params.get("revision_hash", ""))

    # Task graph we are generating for taskcluster...
    graph = {"tasks": [], "scopes": []}

    if params["revision_hash"]:
        for env in TREEHERDER_ROUTES:
            graph["scopes"].append(
                "queue:route:{}.{}".format(TREEHERDER_ROUTES[env], treeherder_route)
            )

    graph["metadata"] = {
        "source": "http://todo.com/what/goes/here",
        "owner": params["owner"],
        # TODO: Add full mach commands to this example?
        "description": "Task graph generated via ./mach taskcluster-graph",
        "name": "task graph local",
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters["build_slugid"] = slugid()
        build_task = templates.load(build["task"], build_parameters)

        if "routes" not in build_task["task"]:
            build_task["task"]["routes"] = []

        if params["revision_hash"]:
            decorate_task_treeherder_routes(build_task["task"], treeherder_route)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph["tasks"].append(build_task)

        test_packages_url, tests_url = None, None

        if "test_packages" in build_task["task"]["extra"]["locations"]:
            test_packages_url = ARTIFACT_URL.format(
                build_parameters["build_slugid"],
                build_task["task"]["extra"]["locations"]["test_packages"],
            )

        if "tests" in build_task["task"]["extra"]["locations"]:
            tests_url = ARTIFACT_URL.format(
                build_parameters["build_slugid"],
                build_task["task"]["extra"]["locations"]["tests"],
            )

        build_url = ARTIFACT_URL.format(
            build_parameters["build_slugid"],
            build_task["task"]["extra"]["locations"]["build"],
        )

        # img_url is only necessary for device builds
        img_url = ARTIFACT_URL.format(
            build_parameters["build_slugid"],
            build_task["task"]["extra"]["locations"].get("img", ""),
        )

        define_task = DEFINE_TASK.format(build_task["task"]["workerType"])

        graph["scopes"].append(define_task)
        graph["scopes"].extend(build_task["task"].get("scopes", []))
        route_scopes = map(
            lambda route: "queue:route:" + route, build_task["task"].get("routes", [])
        )
        graph["scopes"].extend(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task["task"]["extra"]["treeherder"]

        if "machine" not in build_treeherder_config:
            message = "({}), extra.treeherder.machine required for all builds"
            raise ValueError(message.format(build["task"]))

        if "build" not in build_treeherder_config:
            build_treeherder_config["build"] = build_treeherder_config["machine"]

        if "collection" not in build_treeherder_config:
            build_treeherder_config["collection"] = {"opt": True}

        if len(build_treeherder_config["collection"].keys()) != 1:
            message = "({}), extra.treeherder.collection must contain one type"
            # Fixed typo: was message.fomrat(...)
            raise ValueError(message.format(build["task"]))

        for test in build["dependents"]:
            test = test["allowed_build_tasks"][build["task"]]
            test_parameters = copy.copy(build_parameters)
            test_parameters["build_url"] = build_url
            test_parameters["img_url"] = img_url
            if tests_url:
                test_parameters["tests_url"] = tests_url
            if test_packages_url:
                test_parameters["test_packages_url"] = test_packages_url
            test_definition = templates.load(test["task"], {})["task"]
            chunk_config = test_definition["extra"]["chunks"]

            # Allow branch configs to override task level chunking...
            if "chunks" in test:
                chunk_config["total"] = test["chunks"]

            test_parameters["total_chunks"] = chunk_config["total"]

            for chunk in range(1, chunk_config["total"] + 1):
                if "only_chunks" in test and chunk not in test["only_chunks"]:
                    continue

                test_parameters["chunk"] = chunk
                test_task = templates.load(test["task"], test_parameters)
                test_task["taskId"] = slugid()

                if "requires" not in test_task:
                    test_task["requires"] = []

                test_task["requires"].append(test_parameters["build_slugid"])

                if "treeherder" not in test_task["task"]["extra"]:
                    test_task["task"]["extra"]["treeherder"] = {}

                # Copy over any treeherder configuration from the build so
                # tests show up under the same platform...
                test_treeherder_config = test_task["task"]["extra"]["treeherder"]
                test_treeherder_config["collection"] = build_treeherder_config.get(
                    "collection", {}
                )
                test_treeherder_config["build"] = build_treeherder_config.get("build", {})
                test_treeherder_config["machine"] = build_treeherder_config.get(
                    "machine", {}
                )

                if "routes" not in test_task["task"]:
                    test_task["task"]["routes"] = []

                if "scopes" not in test_task["task"]:
                    test_task["task"]["scopes"] = []

                if params["revision_hash"]:
                    decorate_task_treeherder_routes(test_task["task"], treeherder_route)

                graph["tasks"].append(test_task)

                define_task = DEFINE_TASK.format(test_task["task"]["workerType"])

                graph["scopes"].append(define_task)
                graph["scopes"].extend(test_task["task"].get("scopes", []))

    graph["scopes"] = list(set(graph["scopes"]))

    # When we are extending the graph remove extra fields...
    if params["ci"] is True:
        graph.pop("scopes", None)
        graph.pop("metadata", None)

    print(json.dumps(graph, indent=4))