def normalize_image_details(graph, task, seen_images, params, decision_task_id):
    '''
    This takes a task-image payload and creates an image task to build that
    image. The task-image payload is then converted to use the specific task ID
    of that built image. All tasks within the graph requiring this same image
    will have their image details normalized and require the same image build
    task.
    '''
    image = task['task']['payload']['image']
    if isinstance(image, str) or image.get('type', 'docker-image') == 'docker-image':
        return

    if 'requires' not in task:
        task['requires'] = []

    name, details = get_image_details(seen_images, image['taskId'])

    if details.get('required', False) is True or image_requires_building(details) is False:
        if 'required' in details:
            task['requires'].append(details['taskId'])
        return

    image_parameters = create_image_task_parameters(params, name, details)

    if decision_task_id:
        image_artifact_path = "public/decision_task/image_contexts/{}/context.tar.gz".format(
            name)
        destination = "/home/worker/artifacts/decision_task/image_contexts/{}/context.tar.gz".format(
            name)
        image_parameters['context_url'] = ARTIFACT_URL.format(
            decision_task_id, image_artifact_path)
        create_context_tar(image_parameters['context_path'], destination, name)

    templates = Templates(TASKCLUSTER_ROOT)
    image_task = templates.load(IMAGE_BUILD_TASK, image_parameters)
    if params['revision_hash']:
        routes_transform.decorate_task_treeherder_routes(
            image_task['task'],
            "{}.{}".format(params['project'], params['revision_hash']))
        routes_transform.decorate_task_json_routes(image_task['task'],
                                                   get_json_routes(),
                                                   image_parameters)

    graph['tasks'].append(image_task)
    task['requires'].append(details['taskId'])

    define_task = DEFINE_TASK.format(image_task['task']['workerType'])

    graph['scopes'].add(define_task)
    graph['scopes'] |= set(image_task['task'].get('scopes', []))
    route_scopes = map(lambda route: 'queue:route:' + route,
                       image_task['task'].get('routes', []))
    graph['scopes'] |= set(route_scopes)

    details['required'] = True
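# A minimal illustration (not part of the original module) of the payload
# shapes normalize_image_details() distinguishes. Only the 'type' and 'taskId'
# keys are taken from the code above; the other key names and values are
# assumptions for illustration. Plain strings and explicit docker-image
# payloads are returned unchanged; any other dict is treated as an image that
# must first be built in-tree, so the referencing task gains a 'requires'
# edge on the single, shared image build task.
payload_prebuilt = {'image': 'taskcluster/builder:0.5.9'}
payload_docker_image = {'image': {'type': 'docker-image',
                                  'name': 'taskcluster/builder:0.5.9'}}  # assumed shape
payload_task_image = {'image': {'type': 'task-image',      # 'task-image' value assumed
                                'taskId': 'abc123'}}        # triggers an image build task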
def run_task(self, **params):
    from taskcluster_graph.slugidjar import SlugidJar
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates

    templates = Templates(ROOT)
    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'source': 'http://todo.com/soon',
        'project': params['project'],
        'comment': params['comment'],
        'url': params['url'],
        'revision': params['revision'],
        'revision_hash': params.get('revision_hash', ''),
        'owner': params['owner'],
        'as_slugid': SlugidJar(),
        'from_now': json_time_from_now,
        'now': current_json_time()
    }.items())
    task = templates.load(params['task'], parameters)
    print(json.dumps(task, indent=4))
def create_ci_build(self, **params):
    from taskcluster_graph.templates import Templates
    from taskcluster_graph.image_builder import docker_image
    import taskcluster_graph.build_task

    templates = Templates(ROOT)
    # TODO handle git repos
    head_repository = params['head_repository']
    if not head_repository:
        head_repository = get_hg_url()

    head_rev = params['head_rev']
    if not head_rev:
        head_rev = get_latest_hg_revision(head_repository)

    head_ref = params['head_ref'] or head_rev

    # Default to current time if querying the head rev fails
    pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())
    vcs_info = query_vcs_info(params['head_repository'], params['head_rev'])
    if vcs_info:
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(vcs_info.pushdate))

    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )

    build_parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'owner': params['owner'],
        'level': params['level'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'base_repository': params['base_repository'] or head_repository,
        'head_repository': head_repository,
        'head_rev': head_rev,
        'head_ref': head_ref,
        'pushdate': pushdate,
        'pushtime': pushdate[8:],
        'year': pushdate[0:4],
        'month': pushdate[4:6],
        'day': pushdate[6:8],
    }.items())

    try:
        build_task = templates.load(params['build_task'], build_parameters)
        set_interactive_task(build_task, params.get('interactive', False))
    except IOError:
        sys.stderr.write(
            "Could not load build task file. Ensure path is a relative "
            "path from testing/taskcluster")
        sys.exit(1)

    taskcluster_graph.build_task.validate(build_task)
    print(json.dumps(build_task['task'], indent=4))
def create_ci_build(self, **params):
    templates = Templates(ROOT)
    # TODO handle git repos
    head_repository = params['head_repository']
    if not head_repository:
        head_repository = get_hg_url()

    head_rev = params['head_rev']
    if not head_rev:
        head_rev = get_latest_hg_revision(head_repository)

    head_ref = params['head_ref'] or head_rev

    mozharness = load_mozharness_info()
    mozharness_repo = params['mozharness_repository']
    if mozharness_repo is None:
        mozharness_repo = mozharness['repo']
    mozharness_rev = params['mozharness_rev']
    if mozharness_rev is None:
        mozharness_rev = mozharness['revision']

    build_parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'base_repository': params['base_repository'] or head_repository,
        'head_repository': head_repository,
        'head_rev': head_rev,
        'head_ref': head_ref,
        'mozharness_repository': mozharness_repo,
        'mozharness_ref': mozharness_rev,
        'mozharness_rev': mozharness_rev
    }.items())

    try:
        build_task = templates.load(params['build_task'], build_parameters)
    except IOError:
        sys.stderr.write(
            "Could not load build task file. Ensure path is a relative "
            "path from testing/taskcluster")
        sys.exit(1)

    taskcluster_graph.build_task.validate(build_task)
    print(json.dumps(build_task['task'], indent=4))
def run_task(self, **params):
    templates = Templates(ROOT)
    # Template parameters used when expanding the graph
    parameters = {
        'source': 'http://todo.com/soon',
        'project': params['project'],
        'comment': params['comment'],
        'url': params['url'],
        'revision': params['revision'],
        'owner': params['owner'],
        'as_slugid': SlugidJar(),
        'from_now': json_time_from_now,
        'now': datetime.datetime.now().isoformat()
    }
    task = templates.load(params['task'], parameters)
    print(json.dumps(task, indent=4))
def create_ci_build(self, **params):
    from taskcluster_graph.templates import Templates
    from taskcluster_graph.image_builder import docker_image
    import taskcluster_graph.build_task

    templates = Templates(ROOT)
    # TODO handle git repos
    head_repository = params['head_repository']
    if not head_repository:
        head_repository = get_hg_url()

    head_rev = params['head_rev']
    if not head_rev:
        head_rev = get_latest_hg_revision(head_repository)

    head_ref = params['head_ref'] or head_rev

    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )

    build_parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'base_repository': params['base_repository'] or head_repository,
        'head_repository': head_repository,
        'head_rev': head_rev,
        'head_ref': head_ref,
    }.items())

    try:
        build_task = templates.load(params['build_task'], build_parameters)
        set_interactive_task(build_task, params.get('interactive', False))
    except IOError:
        sys.stderr.write(
            "Could not load build task file. Ensure path is a relative "
            "path from testing/taskcluster")
        sys.exit(1)

    taskcluster_graph.build_task.validate(build_task)
    print(json.dumps(build_task['task'], indent=4))
def create_ci_test(self, test_task, task_id='', total_chunks=1, chunk=1, owner=''):
    if total_chunks is None:
        total_chunks = 1

    if chunk is None:
        chunk = 1

    if chunk < 1 or chunk > total_chunks:
        raise ValueError(
            '"chunk" must be a value between 1 and "total_chunks" (default 1)')

    build_url, img_url, tests_url = self._get_build_and_tests_url(task_id)

    test_parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'build_url': ARTIFACT_URL.format(task_id, build_url),
        'img_url': ARTIFACT_URL.format(task_id, img_url),
        'tests_url': ARTIFACT_URL.format(task_id, tests_url),
        'total_chunks': total_chunks,
        'chunk': chunk,
        'owner': owner,
        'from_now': json_time_from_now,
        'now': current_json_time()
    }.items())

    try:
        templates = Templates(ROOT)
        test_task = templates.load(test_task, test_parameters)
    except IOError:
        sys.stderr.write(
            "Could not load test task file. Ensure path is a relative "
            "path from testing/taskcluster")
        sys.exit(1)

    print(json.dumps(test_task['task'], indent=4))
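# Hypothetical invocation of the command above; the owning class name, the
# task path, and the task_id value are placeholders, not from the original
# code. It requests chunk 2 of 5 for a test definition; any chunk outside
# 1 <= chunk <= total_chunks raises the ValueError shown above.
cmd = CiTest()  # assumed owning mach command class
cmd.create_ci_test('tasks/tests/b2g_build_test.yml',  # assumed task path
                   task_id='example-build-task-id',   # placeholder slugid
                   total_chunks=5,
                   chunk=2,
                   owner='user@example.com')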
def create_graph(self, **params):
    from functools import partial
    from mozpack.path import match as mozpackmatch
    from slugid import nice as slugid
    import taskcluster_graph.transform.routes as routes_transform
    import taskcluster_graph.transform.treeherder as treeherder_transform
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.image_builder import (docker_image,
                                                 normalize_image_details,
                                                 task_id_for_image)
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    if params['dry_run']:
        from taskcluster_graph.dry_run import (
            json_time_from_now,
            current_json_time,
            slugid,
        )

    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    templates = Templates(ROOT)

    job_path = os.path.join(ROOT, 'tasks', 'branches', project, 'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})

    job_graph, trigger_tests = parse_commit(message, jobs)

    cmdline_interactive = params.get('interactive', False)

    # Default to current time if querying the head rev fails
    pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())
    vcs_info = query_vcs_info(params['head_repository'], params['head_rev'])
    changed_files = set()
    if vcs_info:
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(vcs_info.pushdate))

        sys.stderr.write('%d commits influencing task scheduling:\n' %
                         len(vcs_info.changesets))
        for c in vcs_info.changesets:
            sys.stderr.write('%s %s\n' % (
                c['node'][0:12],
                c['desc'].splitlines()[0].encode('ascii', 'ignore')))
            changed_files |= set(c['files'])

    # Template parameters used when expanding the graph
    seen_images = {}
    parameters = dict(gaia_info().items() + {
        'index': 'index',
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'task_id_for_image': partial(task_id_for_image, seen_images, project),
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'pushdate': pushdate,
        'pushtime': pushdate[8:],
        'year': pushdate[0:4],
        'month': pushdate[4:6],
        'day': pushdate[6:8],
        'owner': params['owner'],
        'level': params['level'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(params['project'],
                                      params.get('revision_hash', ''))

    routes_file = os.path.join(ROOT, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {
        'tasks': [],
        'scopes': set(),
    }

    if params['revision_hash']:
        for env in routes_transform.TREEHERDER_ROUTES:
            route = 'queue:route:{}.{}'.format(
                routes_transform.TREEHERDER_ROUTES[env], treeherder_route)
            graph['scopes'].add(route)

    graph['metadata'] = {
        'source': '{repo}file/{rev}/testing/taskcluster/mach_commands.py'.format(
            repo=params['head_repository'], rev=params['head_rev']),
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    # Filter the job graph according to conditions met by this invocation run.
    def should_run(task):
        # Old style build or test task that doesn't define conditions. Always runs.
        if 'when' not in task:
            return True

        # Command line override to not filter.
        if params['ignore_conditions']:
            return True

        when = task['when']

        # If the task defines file patterns and we have a set of changed
        # files to compare against, only run if a file pattern matches one
        # of the changed files.
        file_patterns = when.get('file_patterns', None)
        if file_patterns and changed_files:
            for pattern in file_patterns:
                for path in changed_files:
                    if mozpackmatch(path, pattern):
                        sys.stderr.write('scheduling %s because pattern %s '
                                         'matches %s\n' % (task['task'],
                                                           pattern,
                                                           path))
                        return True

            # No file patterns matched. Discard task.
            sys.stderr.write('discarding %s because no relevant files changed\n' %
                             task['task'])
            return False

        return True

    job_graph = filter(should_run, job_graph)

    all_routes = {}

    for build in job_graph:
        interactive = cmdline_interactive or build["interactive"]
        build_parameters = merge_dicts(parameters, build['additional-parameters'])
        build_parameters['build_slugid'] = slugid()
        build_parameters['source'] = '{repo}file/{rev}/testing/taskcluster/{file}'.format(
            repo=params['head_repository'], rev=params['head_rev'], file=build['task'])
        build_task = templates.load(build['task'], build_parameters)

        # Copy build_* attributes to expose them to post-build tasks
        # as well as json routes and tests
        task_extra = build_task['task']['extra']
        build_parameters['build_name'] = task_extra['build_name']
        build_parameters['build_type'] = task_extra['build_type']
        build_parameters['build_product'] = task_extra['build_product']

        normalize_image_details(graph, build_task, seen_images,
                                build_parameters,
                                os.environ.get('TASK_ID', None))
        set_interactive_task(build_task, interactive)

        # try builds don't use cache
        if project == "try":
            remove_caches_from_task(build_task)

        if params['revision_hash']:
            treeherder_transform.add_treeherder_revision_info(
                build_task['task'], params['head_rev'], params['revision_hash'])
            routes_transform.decorate_task_treeherder_routes(
                build_task['task'], treeherder_route)
            routes_transform.decorate_task_json_routes(build_task['task'],
                                                       json_routes,
                                                       build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        for location in build_task['task']['extra'].get('locations', {}):
            build_parameters['{}_url'.format(location)] = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations'][location])

        for url in build_task['task']['extra'].get('url', {}):
            build_parameters['{}_url'.format(url)] = \
                build_task['task']['extra']['url'][url]

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        for route in build_task['task'].get('routes', []):
            if route.startswith('index.gecko.v2') and route in all_routes:
                raise Exception(
                    "Error: route '%s' is in use by multiple tasks: '%s' and '%s'" % (
                        route,
                        build_task['task']['metadata']['name'],
                        all_routes[route],
                    ))
            all_routes[route] = build_task['task']['metadata']['name']

        graph['scopes'].add(define_task)
        graph['scopes'] |= set(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'] |= set(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            post_parameters = merge_dicts(build_parameters,
                                          post_build.get('additional-parameters', {}))
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 slugid(),
                                                 templates,
                                                 build_treeherder_config)
            normalize_image_details(graph, post_task, seen_images,
                                    build_parameters,
                                    os.environ.get('TASK_ID', None))
            set_interactive_task(post_task, interactive)
            treeherder_transform.add_treeherder_revision_info(
                post_task['task'], params['head_rev'], params['revision_hash'])
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            test_parameters = merge_dicts(build_parameters,
                                          test.get('additional-parameters', {}))
            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra'].get('chunks', {})

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            chunked = 'total' in chunk_config
            if chunked:
                test_parameters['total_chunks'] = chunk_config['total']

            if 'suite' in test_definition['extra']:
                suite_config = test_definition['extra']['suite']
                test_parameters['suite'] = suite_config['name']
                test_parameters['flavor'] = suite_config.get('flavor', '')

            for chunk in range(1, chunk_config.get('total', 1) + 1):
                if 'only_chunks' in test and chunked and \
                        chunk not in test['only_chunks']:
                    continue

                if chunked:
                    test_parameters['chunk'] = chunk

                test_task = configure_dependent_task(test['task'],
                                                     test_parameters,
                                                     slugid(),
                                                     templates,
                                                     build_treeherder_config)
                normalize_image_details(graph, test_task, seen_images,
                                        build_parameters,
                                        os.environ.get('TASK_ID', None))
                set_interactive_task(test_task, interactive)

                if params['revision_hash']:
                    treeherder_transform.add_treeherder_revision_info(
                        test_task['task'], params['head_rev'],
                        params['revision_hash'])
                    routes_transform.decorate_task_treeherder_routes(
                        test_task['task'], treeherder_route)

                # This will schedule test jobs N times
                for i in range(0, trigger_tests):
                    graph['tasks'].append(test_task)
                    # If we're scheduling more tasks, each has to be unique
                    test_task = copy.deepcopy(test_task)
                    test_task['taskId'] = slugid()

                define_task = DEFINE_TASK.format(test_task['task']['workerType'])

                graph['scopes'].add(define_task)
                graph['scopes'] |= set(test_task['task'].get('scopes', []))

    graph['scopes'] = sorted(graph['scopes'])

    if params['print_names_only']:
        tIDs = defaultdict(list)

        def print_task(task, indent=0):
            print('{}- {}'.format(' ' * indent, task['task']['metadata']['name']))
            for child in tIDs[task['taskId']]:
                print_task(child, indent=indent + 2)

        # build a dependency map
        for task in graph['tasks']:
            if 'requires' in task:
                for tID in task['requires']:
                    tIDs[tID].append(task)

        # recursively print root tasks
        for task in graph['tasks']:
            if 'requires' not in task:
                print_task(task)
        return

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4, sort_keys=True))
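# merge_dicts() is called throughout create_graph() but defined elsewhere in
# the module. A minimal sketch consistent with its call sites here, assuming a
# shallow left-to-right merge where later dicts win and no input is mutated:
def merge_dicts(*dicts):
    merged = {}
    for d in dicts:
        merged.update(d)
    return merged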
def create_graph(self, **params):
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.slugid import slugid
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'")
        sys.exit(1)

    templates = Templates(ROOT)
    job_path = os.path.join(ROOT, 'tasks', 'branches', project, 'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)
    mozharness = load_mozharness_info()

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'index': 'index.garbage.staging.mshal-testing',  # TODO
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'mozharness_repository': mozharness['repo'],
        'mozharness_rev': mozharness['revision'],
        'mozharness_ref': mozharness.get('reference', mozharness['revision']),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(params['project'],
                                      params.get('revision_hash', ''))

    routes_file = os.path.join(ROOT, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {'tasks': [], 'scopes': []}

    if params['revision_hash']:
        for env in TREEHERDER_ROUTES:
            graph['scopes'].append('queue:route:{}.{}'.format(
                TREEHERDER_ROUTES[env], treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if params['revision_hash']:
            decorate_task_treeherder_routes(build_task['task'],
                                            treeherder_route)

        decorate_task_json_routes(build,
                                  build_task['task'],
                                  json_routes,
                                  build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        test_packages_url, tests_url = None, None

        if 'test_packages' in build_task['task']['extra']['locations']:
            test_packages_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['test_packages'])

        if 'tests' in build_task['task']['extra']['locations']:
            tests_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['tests'])

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build'])

        # img_url is only necessary for device builds
        img_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations'].get('img', ''))

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'].extend(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            post_parameters = copy.copy(build_parameters)
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 slugid(),
                                                 templates,
                                                 build_treeherder_config)
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['img_url'] = img_url
            if tests_url:
                test_parameters['tests_url'] = tests_url
            if test_packages_url:
                test_parameters['test_packages_url'] = test_packages_url

            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra']['chunks']

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            test_parameters['total_chunks'] = chunk_config['total']

            for chunk in range(1, chunk_config['total'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = configure_dependent_task(test['task'],
                                                     test_parameters,
                                                     slugid(),
                                                     templates,
                                                     build_treeherder_config)

                if params['revision_hash']:
                    decorate_task_treeherder_routes(test_task['task'],
                                                    treeherder_route)

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(test_task['task']['workerType'])

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    from functools import partial
    from slugid import nice as slugid
    import taskcluster_graph.transform.routes as routes_transform
    from taskcluster_graph.commit_parser import parse_commit
    from taskcluster_graph.image_builder import (docker_image,
                                                 normalize_image_details,
                                                 task_id_for_image)
    from taskcluster_graph.from_now import (
        json_time_from_now,
        current_json_time,
    )
    from taskcluster_graph.templates import Templates
    import taskcluster_graph.build_task

    if params['dry_run']:
        from taskcluster_graph.dry_run import (
            json_time_from_now,
            current_json_time,
            slugid,
        )

    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'")
        sys.exit(1)

    templates = Templates(ROOT)
    job_path = os.path.join(ROOT, 'tasks', 'branches', project, 'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)

    cmdline_interactive = params.get('interactive', False)

    # Default to current time if querying the head rev fails
    pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())
    pushinfo = query_pushinfo(params['head_repository'], params['head_rev'])
    if pushinfo:
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))

    # Template parameters used when expanding the graph
    seen_images = {}
    parameters = dict(gaia_info().items() + {
        'index': 'index',
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'task_id_for_image': partial(task_id_for_image, seen_images, project),
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'pushdate': pushdate,
        'pushtime': pushdate[8:],
        'year': pushdate[0:4],
        'month': pushdate[4:6],
        'day': pushdate[6:8],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(params['project'],
                                      params.get('revision_hash', ''))

    routes_file = os.path.join(ROOT, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {'tasks': [], 'scopes': []}

    if params['revision_hash']:
        for env in routes_transform.TREEHERDER_ROUTES:
            route = 'queue:route:{}.{}'.format(
                routes_transform.TREEHERDER_ROUTES[env], treeherder_route)
            graph['scopes'].append(route)

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    all_routes = {}

    for build in job_graph:
        interactive = cmdline_interactive or build["interactive"]
        build_parameters = merge_dicts(parameters, build['additional-parameters'])
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        # Copy build_* attributes to expose them to post-build tasks
        # as well as json routes and tests
        task_extra = build_task['task']['extra']
        build_parameters['build_name'] = task_extra['build_name']
        build_parameters['build_type'] = task_extra['build_type']
        build_parameters['build_product'] = task_extra['build_product']

        normalize_image_details(graph, build_task, seen_images,
                                build_parameters,
                                os.environ.get('TASK_ID', None))
        set_interactive_task(build_task, interactive)

        # try builds don't use cache
        if project == "try":
            remove_caches_from_task(build_task)

        if params['revision_hash']:
            routes_transform.decorate_task_treeherder_routes(
                build_task['task'], treeherder_route)
            routes_transform.decorate_task_json_routes(build_task['task'],
                                                       json_routes,
                                                       build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        test_packages_url, tests_url, mozharness_url = None, None, None

        if 'test_packages' in build_task['task']['extra']['locations']:
            test_packages_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['test_packages'])

        if 'tests' in build_task['task']['extra']['locations']:
            tests_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['tests'])

        if 'mozharness' in build_task['task']['extra']['locations']:
            mozharness_url = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations']['mozharness'])

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build'])
        build_parameters['build_url'] = build_url

        # img_url is only necessary for device builds
        img_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations'].get('img', ''))
        build_parameters['img_url'] = img_url

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        for route in build_task['task'].get('routes', []):
            if route.startswith('index.gecko.v2') and route in all_routes:
                raise Exception(
                    "Error: route '%s' is in use by multiple tasks: '%s' and '%s'" % (
                        route,
                        build_task['task']['metadata']['name'],
                        all_routes[route],
                    ))
            all_routes[route] = build_task['task']['metadata']['name']

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'].extend(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            post_parameters = merge_dicts(build_parameters,
                                          post_build.get('additional-parameters', {}))
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 slugid(),
                                                 templates,
                                                 build_treeherder_config)
            normalize_image_details(graph, post_task, seen_images,
                                    build_parameters,
                                    os.environ.get('TASK_ID', None))
            set_interactive_task(post_task, interactive)
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            test_parameters = merge_dicts(build_parameters,
                                          test.get('additional-parameters', {}))
            if tests_url:
                test_parameters['tests_url'] = tests_url
            if test_packages_url:
                test_parameters['test_packages_url'] = test_packages_url
            if mozharness_url:
                test_parameters['mozharness_url'] = mozharness_url
            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra'].get('chunks', {})

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            chunked = 'total' in chunk_config
            if chunked:
                test_parameters['total_chunks'] = chunk_config['total']

            if 'suite' in test_definition['extra']:
                suite_config = test_definition['extra']['suite']
                test_parameters['suite'] = suite_config['name']
                test_parameters['flavor'] = suite_config.get('flavor', '')

            for chunk in range(1, chunk_config.get('total', 1) + 1):
                if 'only_chunks' in test and chunked and \
                        chunk not in test['only_chunks']:
                    continue

                if chunked:
                    test_parameters['chunk'] = chunk

                test_task = configure_dependent_task(test['task'],
                                                     test_parameters,
                                                     slugid(),
                                                     templates,
                                                     build_treeherder_config)
                normalize_image_details(graph, test_task, seen_images,
                                        build_parameters,
                                        os.environ.get('TASK_ID', None))
                set_interactive_task(test_task, interactive)

                if params['revision_hash']:
                    routes_transform.decorate_task_treeherder_routes(
                        test_task['task'], treeherder_route)

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(test_task['task']['workerType'])

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    if params['print_names_only']:
        tIDs = defaultdict(list)

        def print_task(task, indent=0):
            print('{}- {}'.format(' ' * indent, task['task']['metadata']['name']))
            for child in tIDs[task['taskId']]:
                print_task(child, indent=indent + 2)

        # build a dependency map
        for task in graph['tasks']:
            if 'requires' in task:
                for tID in task['requires']:
                    tIDs[tID].append(task)

        # recursively print root tasks
        for task in graph['tasks']:
            if 'requires' not in task:
                print_task(task)
        return

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def create_graph(self, **params):
    project = params['project']
    message = params.get('message', '') if project == 'try' else DEFAULT_TRY

    # Message would only be blank when not created from decision task
    if project == 'try' and not message:
        sys.stderr.write(
            "Must supply commit message when creating try graph. "
            "Example: --message='try: -b do -p all -u all'")
        sys.exit(1)

    templates = Templates(ROOT)
    job_path = os.path.join(ROOT, 'tasks', 'branches', project, 'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else DEFAULT_JOB_PATH

    jobs = templates.load(job_path, {})
    job_graph = parse_commit(message, jobs)

    # Template parameters used when expanding the graph
    parameters = dict(gaia_info().items() + {
        'docker_image': docker_image,
        'base_repository': params['base_repository'] or
            params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'owner': params['owner'],
        'from_now': json_time_from_now,
        'now': datetime.datetime.now().isoformat(),
        'mozharness_repository': params['mozharness_repository'],
        'mozharness_rev': params['mozharness_rev'],
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}.{}'.format(TREEHERDER_ROUTE_PREFIX,
                                         params['project'],
                                         params.get('revision_hash', ''))

    # Task graph we are generating for taskcluster...
    graph = {'tasks': [], 'scopes': []}

    if params['revision_hash']:
        graph['scopes'].append('queue:route:{}'.format(treeherder_route))

    graph['metadata'] = {
        'source': 'http://todo.com/what/goes/here',
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    for build in job_graph:
        build_parameters = dict(parameters)
        build_parameters['build_slugid'] = slugid()
        build_task = templates.load(build['task'], build_parameters)

        if 'routes' not in build_task['task']:
            build_task['task']['routes'] = []

        if params['revision_hash']:
            build_task['task']['routes'].append(treeherder_route)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)
        graph['tasks'].append(build_task)

        tests_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['tests'])

        build_url = ARTIFACT_URL.format(
            build_parameters['build_slugid'],
            build_task['task']['extra']['locations']['build'])

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        graph['scopes'].append(define_task)
        graph['scopes'].extend(build_task['task'].get('scopes', []))

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            raise ValueError(message.format(build['task']))

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            test_parameters = copy.copy(build_parameters)
            test_parameters['build_url'] = build_url
            test_parameters['tests_url'] = tests_url
            test_parameters['total_chunks'] = 1

            if 'chunks' in test:
                test_parameters['total_chunks'] = test['chunks']

            for chunk in range(1, test_parameters['total_chunks'] + 1):
                if 'only_chunks' in test and \
                        chunk not in test['only_chunks']:
                    continue

                test_parameters['chunk'] = chunk
                test_task = templates.load(test['task'], test_parameters)
                test_task['taskId'] = slugid()

                if 'requires' not in test_task:
                    test_task['requires'] = []

                test_task['requires'].append(test_parameters['build_slugid'])

                if 'treeherder' not in test_task['task']['extra']:
                    test_task['task']['extra']['treeherder'] = {}

                # Copy over any treeherder configuration from the build so
                # tests show up under the same platform...
                test_treeherder_config = test_task['task']['extra']['treeherder']

                test_treeherder_config['collection'] = \
                    build_treeherder_config.get('collection', {})

                test_treeherder_config['build'] = \
                    build_treeherder_config.get('build', {})

                test_treeherder_config['machine'] = \
                    build_treeherder_config.get('machine', {})

                if 'routes' not in test_task['task']:
                    test_task['task']['routes'] = []

                if 'scopes' not in test_task['task']:
                    test_task['task']['scopes'] = []

                if params['revision_hash']:
                    test_task['task']['routes'].append(treeherder_route)
                    test_task['task']['scopes'].append(
                        'queue:route:{}'.format(treeherder_route))

                graph['tasks'].append(test_task)

                define_task = DEFINE_TASK.format(test_task['task']['workerType'])

                graph['scopes'].append(define_task)
                graph['scopes'].extend(test_task['task'].get('scopes', []))

    graph['scopes'] = list(set(graph['scopes']))

    # When we are extending the graph remove extra fields...
    if params['ci'] is True:
        graph.pop('scopes', None)
        graph.pop('metadata', None)

    print(json.dumps(graph, indent=4))
def load_tasks(self, params):
    root = os.path.abspath(os.path.join(self.path, self.config['legacy_path']))

    project = params['project']
    # NOTE: message is ignored here; we always use DEFAULT_TRY, then filter the
    # resulting task graph later
    message = DEFAULT_TRY

    templates = Templates(root)

    job_path = os.path.join(root, 'tasks', 'branches', project, 'job_flags.yml')
    job_path = job_path if os.path.exists(job_path) else \
        os.path.join(root, DEFAULT_JOB_PATH)

    jobs = templates.load(job_path, {})

    job_graph, trigger_tests = parse_commit(message, jobs)

    cmdline_interactive = params.get('interactive', False)

    # Default to current time if querying the head rev fails
    pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())
    vcs_info = query_vcs_info(params['head_repository'], params['head_rev'])
    changed_files = set()
    if vcs_info:
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(vcs_info.pushdate))

        logger.debug('{} commits influencing task scheduling:'.format(
            len(vcs_info.changesets)))
        for c in vcs_info.changesets:
            logger.debug("{cset} {desc}".format(
                cset=c['node'][0:12],
                desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
            changed_files |= set(c['files'])

    # Template parameters used when expanding the graph
    seen_images = {}
    parameters = dict(gaia_info().items() + {
        'index': 'index',
        'project': project,
        'pushlog_id': params.get('pushlog_id', 0),
        'docker_image': docker_image,
        'task_id_for_image': partial(task_id_for_image, seen_images, project),
        'base_repository': params['base_repository'] or params['head_repository'],
        'head_repository': params['head_repository'],
        'head_ref': params['head_ref'] or params['head_rev'],
        'head_rev': params['head_rev'],
        'pushdate': pushdate,
        'pushtime': pushdate[8:],
        'year': pushdate[0:4],
        'month': pushdate[4:6],
        'day': pushdate[6:8],
        'owner': params['owner'],
        'level': params['level'],
        'from_now': json_time_from_now,
        'now': current_json_time(),
        'revision_hash': params['revision_hash']
    }.items())

    treeherder_route = '{}.{}'.format(params['project'],
                                      params.get('revision_hash', ''))

    routes_file = os.path.join(root, 'routes.json')
    with open(routes_file) as f:
        contents = json.load(f)
        json_routes = contents['routes']
        # TODO: Nightly and/or l10n routes

    # Task graph we are generating for taskcluster...
    graph = {
        'tasks': [],
        'scopes': set(),
    }

    if params['revision_hash']:
        for env in routes_transform.TREEHERDER_ROUTES:
            route = 'queue:route:{}.{}'.format(
                routes_transform.TREEHERDER_ROUTES[env], treeherder_route)
            graph['scopes'].add(route)

    graph['metadata'] = {
        'source': '{repo}file/{rev}/testing/taskcluster/mach_commands.py'.format(
            repo=params['head_repository'], rev=params['head_rev']),
        'owner': params['owner'],
        # TODO: Add full mach commands to this example?
        'description': 'Task graph generated via ./mach taskcluster-graph',
        'name': 'task graph local'
    }

    # Filter the job graph according to conditions met by this invocation run.
    def should_run(task):
        # Old style build or test task that doesn't define conditions. Always runs.
        if 'when' not in task:
            return True

        when = task['when']

        # If the task defines file patterns and we have a set of changed
        # files to compare against, only run if a file pattern matches one
        # of the changed files.
        file_patterns = when.get('file_patterns', None)
        if file_patterns and changed_files:
            # Always consider changes to the task definition itself
            file_patterns.append(
                'testing/taskcluster/{task}'.format(task=task['task']))
            for pattern in file_patterns:
                for path in changed_files:
                    if mozpackmatch(path, pattern):
                        logger.debug('scheduling {task} because pattern {pattern} '
                                     'matches {path}'.format(
                                         task=task['task'],
                                         pattern=pattern,
                                         path=path,
                                     ))
                        return True

            # No file patterns matched. Discard task.
            logger.debug('discarding {task} because no relevant files changed'.format(
                task=task['task']))
            return False

        return True

    job_graph = filter(should_run, job_graph)

    all_routes = {}

    for build in job_graph:
        logger.debug("loading build task {}".format(build['task']))
        interactive = cmdline_interactive or build["interactive"]
        build_parameters = merge_dicts(parameters, build['additional-parameters'])
        build_parameters['build_slugid'] = mklabel()
        build_parameters['source'] = '{repo}file/{rev}/testing/taskcluster/{file}'.format(
            repo=params['head_repository'], rev=params['head_rev'], file=build['task'])
        build_task = templates.load(build['task'], build_parameters)

        # Copy build_* attributes to expose them to post-build tasks
        # as well as json routes and tests
        task_extra = build_task['task']['extra']
        build_parameters['build_name'] = task_extra['build_name']
        build_parameters['build_type'] = task_extra['build_type']
        build_parameters['build_product'] = task_extra['build_product']

        normalize_image_details(graph, build_task, seen_images,
                                build_parameters,
                                os.environ.get('TASK_ID', None))
        set_interactive_task(build_task, interactive)

        # try builds don't use cache
        if project == "try":
            remove_caches_from_task(build_task)
            set_expiration(build_task, json_time_from_now(TRY_EXPIRATION))

        if params['revision_hash']:
            treeherder_transform.add_treeherder_revision_info(
                build_task['task'], params['head_rev'], params['revision_hash'])
            routes_transform.decorate_task_treeherder_routes(
                build_task['task'], treeherder_route)
            routes_transform.decorate_task_json_routes(build_task['task'],
                                                       json_routes,
                                                       build_parameters)

        # Ensure each build graph is valid after construction.
        taskcluster_graph.build_task.validate(build_task)

        attributes = build_task['attributes'] = {
            'kind': 'legacy',
            'legacy_kind': 'build',
        }
        if 'build_name' in build:
            attributes['build_platform'] = build['build_name']
        if 'build_type' in task_extra:
            attributes['build_type'] = {
                'dbg': 'debug'
            }.get(task_extra['build_type'], task_extra['build_type'])
        if build.get('is_job'):
            attributes['job'] = build['build_name']
            attributes['legacy_kind'] = 'job'
        graph['tasks'].append(build_task)

        for location in build_task['task']['extra'].get('locations', {}):
            build_parameters['{}_url'.format(location)] = ARTIFACT_URL.format(
                build_parameters['build_slugid'],
                build_task['task']['extra']['locations'][location])

        for url in build_task['task']['extra'].get('url', {}):
            build_parameters['{}_url'.format(url)] = \
                build_task['task']['extra']['url'][url]

        define_task = DEFINE_TASK.format(build_task['task']['workerType'])

        for route in build_task['task'].get('routes', []):
            if route.startswith('index.gecko.v2') and route in all_routes:
                raise Exception(
                    "Error: route '%s' is in use by multiple tasks: '%s' and '%s'" % (
                        route,
                        build_task['task']['metadata']['name'],
                        all_routes[route],
                    ))
            all_routes[route] = build_task['task']['metadata']['name']

        graph['scopes'].add(define_task)
        graph['scopes'] |= set(build_task['task'].get('scopes', []))
        route_scopes = map(lambda route: 'queue:route:' + route,
                           build_task['task'].get('routes', []))
        graph['scopes'] |= set(route_scopes)

        # Treeherder symbol configuration for the graph required for each
        # build so tests know which platform they belong to.
        build_treeherder_config = build_task['task']['extra']['treeherder']

        if 'machine' not in build_treeherder_config:
            message = '({}), extra.treeherder.machine required for all builds'
            raise ValueError(message.format(build['task']))

        if 'build' not in build_treeherder_config:
            build_treeherder_config['build'] = build_treeherder_config['machine']

        if 'collection' not in build_treeherder_config:
            build_treeherder_config['collection'] = {'opt': True}

        if len(build_treeherder_config['collection'].keys()) != 1:
            message = '({}), extra.treeherder.collection must contain one type'
            raise ValueError(message.format(build['task']))

        for post_build in build['post-build']:
            # copy over the old parameters to update the template
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            post_parameters = merge_dicts(build_parameters,
                                          post_build.get('additional-parameters', {}))
            post_task = configure_dependent_task(post_build['task'],
                                                 post_parameters,
                                                 mklabel(),
                                                 templates,
                                                 build_treeherder_config)
            normalize_image_details(graph, post_task, seen_images,
                                    build_parameters,
                                    os.environ.get('TASK_ID', None))
            set_interactive_task(post_task, interactive)
            treeherder_transform.add_treeherder_revision_info(
                post_task['task'], params['head_rev'], params['revision_hash'])

            if project == "try":
                set_expiration(post_task, json_time_from_now(TRY_EXPIRATION))

            post_task['attributes'] = attributes.copy()
            post_task['attributes']['legacy_kind'] = 'post_build'
            post_task['attributes']['post_build'] = post_build['job_flag']
            graph['tasks'].append(post_task)

        for test in build['dependents']:
            test = test['allowed_build_tasks'][build['task']]
            # TODO additional-parameters is currently not an option, only
            # enabled for build tasks
            test_parameters = merge_dicts(build_parameters,
                                          test.get('additional-parameters', {}))
            test_definition = templates.load(test['task'], {})['task']
            chunk_config = test_definition['extra'].get('chunks', {})

            # Allow branch configs to override task level chunking...
            if 'chunks' in test:
                chunk_config['total'] = test['chunks']

            chunked = 'total' in chunk_config
            if chunked:
                test_parameters['total_chunks'] = chunk_config['total']

            if 'suite' in test_definition['extra']:
                suite_config = test_definition['extra']['suite']
                test_parameters['suite'] = suite_config['name']
                test_parameters['flavor'] = suite_config.get('flavor', '')

            for chunk in range(1, chunk_config.get('total', 1) + 1):
                if 'only_chunks' in test and chunked and \
                        chunk not in test['only_chunks']:
                    continue

                if chunked:
                    test_parameters['chunk'] = chunk

                test_task = configure_dependent_task(test['task'],
                                                     test_parameters,
                                                     mklabel(),
                                                     templates,
                                                     build_treeherder_config)
                normalize_image_details(graph, test_task, seen_images,
                                        build_parameters,
                                        os.environ.get('TASK_ID', None))
                set_interactive_task(test_task, interactive)

                if params['revision_hash']:
                    treeherder_transform.add_treeherder_revision_info(
                        test_task['task'], params['head_rev'],
                        params['revision_hash'])
                    routes_transform.decorate_task_treeherder_routes(
                        test_task['task'], treeherder_route)

                if project == "try":
                    set_expiration(test_task, json_time_from_now(TRY_EXPIRATION))

                test_task['attributes'] = attributes.copy()
                test_task['attributes']['legacy_kind'] = 'unittest'
                test_task['attributes']['test_platform'] = attributes['build_platform']
                test_task['attributes']['unittest_try_name'] = test['unittest_try_name']
                for param, attr in [
                        ('suite', 'unittest_suite'),
                        ('flavor', 'unittest_flavor'),
                        ('chunk', 'test_chunk')]:
                    if param in test_parameters:
                        test_task['attributes'][attr] = str(test_parameters[param])

                # This will schedule test jobs N times
                for i in range(0, trigger_tests):
                    graph['tasks'].append(test_task)
                    # If we're scheduling more tasks, each has to be unique
                    test_task = copy.deepcopy(test_task)
                    test_task['taskId'] = mklabel()

                define_task = DEFINE_TASK.format(test_task['task']['workerType'])

                graph['scopes'].add(define_task)
                graph['scopes'] |= set(test_task['task'].get('scopes', []))

    graph['scopes'] = sorted(graph['scopes'])

    # save the graph for later, when taskgraph asks for additional information
    # such as dependencies
    self.graph = graph
    self.tasks_by_label = {t['taskId']: t for t in self.graph['tasks']}

    # Convert to a dictionary of tasks. The process above has invented a
    # taskId for each task, and we use those as the *labels* for the tasks;
    # taskgraph will later assign them new taskIds.
    return [Task(self, t['taskId'], task=t['task'], attributes=t['attributes'])
            for t in self.graph['tasks']]
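# Hypothetical downstream use (not part of the original kind implementation):
# summarize the returned legacy tasks by the 'legacy_kind' attribute attached
# above. This assumes the Task wrapper exposes the attributes dict it was
# constructed with as an 'attributes' member.
from collections import Counter

def summarize_legacy_kinds(tasks):
    return Counter(t.attributes['legacy_kind'] for t in tasks)
# e.g. Counter({'unittest': 42, 'build': 7, 'post_build': 3}) -- illustrative only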
def setUp(self):
    abs_path = os.path.abspath(os.path.dirname(__file__))
    self.subject = Templates(os.path.join(abs_path, 'fixtures'))
def test_invalid_path(self):
    with self.assertRaisesRegexp(TemplatesException, 'must be a directory'):
        Templates('/zomg/not/a/dir')