def geckoprofile_action(parameters, graph_config, input, task_group_id, task_id):
    """Re-run a task with gecko profiling enabled on the two most recent pushes.

    Walks backwards through the pushlog starting at the current push, collects
    the last ``depth`` push ids, and for each push re-creates the task named by
    the triggering task's metadata with ``--gecko-profile`` added to its perf
    command and a ``-p`` suffix on its Treeherder symbol.

    Args:
        parameters: decision-task parameters (needs ``pushlog_id``,
            ``head_repository`` and ``project``).
        graph_config: the graph configuration object.
        input: action input payload (unused here; part of the action API).
        task_group_id: task group of the action task (unused; action API).
        task_id: id of the task to re-run with profiling enabled.
    """
    task = taskcluster.get_task_definition(task_id)
    label = task['metadata']['name']
    pushes = []
    depth = 2
    end_id = int(parameters['pushlog_id'])

    # Page backwards through the pushlog until we have `depth` push ids.
    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    # Keep only the most recent `depth` pushes.
    pushes = sorted(pushes)[-depth:]
    backfill_pushes = []

    for push in pushes:
        try:
            full_task_graph = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/full-task-graph.json')
            _, full_task_graph = TaskGraph.from_json(full_task_graph)
            label_to_taskid = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/label-to-taskid.json')
            push_params = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/parameters.yml')
            push_decision_task_id = find_decision_task(push_params, graph_config)
        except HTTPError as e:
            # Old pushes may not have published these index artifacts; best-effort skip.
            logger.info('Skipping {} due to missing index artifacts! Error: {}'.format(push, e))
            continue

        if label in full_task_graph.tasks.keys():
            def modifier(task):
                # Only rewrite the target task; leave its dependencies untouched.
                if task.label != label:
                    return task

                cmd = task.task['payload']['command']
                task.task['payload']['command'] = add_args_to_perf_command(
                    cmd, ['--gecko-profile'])
                task.task['extra']['treeherder']['symbol'] += '-p'
                return task

            create_tasks(graph_config, [label], full_task_graph, label_to_taskid,
                         push_params, push_decision_task_id, push, modifier=modifier)
            backfill_pushes.append(push)
        else:
            # Fixed: use the module logger consistently instead of the root
            # logger (`logging.info`) so records carry this module's name/config.
            logger.info('Could not find {} on {}. Skipping.'.format(label, push))
    combine_task_graph_files(backfill_pushes)
def backfill_action(parameters, graph_config, input, task_group_id, task_id, task):
    """Re-create the given task on the ``depth`` pushes preceding this one.

    Starting just before the current push, pages backwards through the pushlog
    to collect up to ``depth`` push ids (default 5, overridable via
    ``input['depth']``), then schedules the task named by ``task`` on each of
    those pushes, using that push's own task graph and parameters.

    Args:
        parameters: decision-task parameters (needs ``pushlog_id``,
            ``head_repository`` and ``project``).
        graph_config: the graph configuration object.
        input: action input payload; ``depth`` selects how many pushes back.
        task_group_id: task group of the action task (unused; action API).
        task_id: id of the triggering task (unused; action API).
        task: the triggering task definition; its metadata name is the label
            to backfill.
    """
    label = task['metadata']['name']
    pushes = []
    depth = input.get('depth', 5)
    # Start one push before the current one: we backfill earlier pushes only.
    end_id = int(parameters['pushlog_id']) - 1

    # Page backwards through the pushlog until we have `depth` push ids.
    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        # Fixed: wrap .keys() in list() — on Python 3 `list + dict_keys`
        # raises TypeError (matches the list() usage elsewhere in this file).
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    # Keep only the most recent `depth` pushes.
    pushes = sorted(pushes)[-depth:]

    for push in pushes:
        try:
            full_task_graph = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/full-task-graph.json')
            _, full_task_graph = TaskGraph.from_json(full_task_graph)
            label_to_taskid = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/label-to-taskid.json')
            push_params = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/parameters.yml')
            push_decision_task_id = find_decision_task(push_params, graph_config)
        except HTTPError as e:
            # Old pushes may not have published these index artifacts; best-effort skip.
            logger.info(
                'Skipping {} due to missing index artifacts! Error: {}'.format(
                    push, e))
            continue

        if label in full_task_graph.tasks.keys():
            create_tasks([label], full_task_graph, label_to_taskid,
                         push_params, push_decision_task_id, push)
        else:
            # Fixed: use the module logger consistently instead of the root
            # logger (`logging.info`).
            logger.info('Could not find {} on {}. Skipping.'.format(
                label, push))
def backfill_action(parameters, input, task_group_id, task_id, task):
    """Re-create the given task on the ``depth`` pushes preceding this one.

    Older revision of the backfill action: resolves task references and calls
    ``create_task`` directly per push instead of going through
    ``create_tasks``.

    Args:
        parameters: decision-task parameters (needs ``pushlog_id``,
            ``head_repository``, ``project`` and ``level``).
        input: action input payload; ``depth`` selects how many pushes back.
        task_group_id: task group of the action task (unused; action API).
        task_id: id of the triggering task (unused; action API).
        task: the triggering task definition; its metadata name is the label
            to backfill.
    """
    label = task['metadata']['name']
    pushes = []
    depth = input.get('depth', 5)
    # Start one push before the current one: we backfill earlier pushes only.
    end_id = int(parameters['pushlog_id']) - 1

    # Page backwards through the pushlog until we have `depth` push ids.
    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        # Fixed: wrap .keys() in list() — on Python 3 `list + dict_keys`
        # raises TypeError (matches the list() usage elsewhere in this file).
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    # Keep only the most recent `depth` pushes.
    pushes = sorted(pushes)[-depth:]

    for push in pushes:
        full_task_graph = get_artifact_from_index(
            INDEX_TMPL.format(parameters['project'], push),
            'public/full-task-graph.json')
        _, full_task_graph = TaskGraph.from_json(full_task_graph)
        label_to_taskid = get_artifact_from_index(
            INDEX_TMPL.format(parameters['project'], push),
            'public/label-to-taskid.json')

        if label in full_task_graph.tasks.keys():
            task = full_task_graph.tasks[label]
            # Fixed: .iteritems()/.itervalues() are Python-2-only; .items() /
            # .values() behave identically here and work on both 2 and 3.
            dependencies = {
                name: label_to_taskid[label]
                for name, label in task.dependencies.items()
            }
            task_def = resolve_task_references(task.label, task.task, dependencies)
            task_def.setdefault('dependencies', []).extend(dependencies.values())
            create_task(slugid(), task_def, parameters['level'])
        else:
            # Fixed: use the module logger consistently instead of the root
            # logger (`logging.info`).
            logger.info('Could not find {} on {}. Skipping.'.format(
                label, push))
def backfill_action(parameters, graph_config, input, task_group_id, task_id):
    """Re-create the triggering task on the pushes preceding (or including) this one.

    Pages backwards through the pushlog to collect ``depth`` push ids (default
    5, overridable via ``input['depth']``; ``input['inclusive']`` also includes
    the current push), then schedules the task on each push. When
    ``input['testPath']`` is given, the task is morphed into a test-verify job
    targeting just that path; ``input['times']`` repeats each scheduling.

    Args:
        parameters: decision-task parameters (needs ``pushlog_id``,
            ``head_repository`` and ``project``).
        graph_config: the graph configuration object.
        input: action input payload (``depth``, ``inclusive``, ``testPath``,
            ``times``).
        task_group_id: task group of the action task (unused; action API).
        task_id: id of the triggering task whose label is backfilled.
    """
    task = taskcluster.get_task_definition(task_id)
    label = task['metadata']['name']
    pushes = []
    # With inclusive=True the current push itself is also backfilled, so we
    # look one push less far back but start at the current push id.
    inclusive_tweak = 1 if input.get('inclusive') else 0
    depth = input.get('depth', 5) + inclusive_tweak
    end_id = int(parameters['pushlog_id']) - (1 - inclusive_tweak)

    # Page backwards through the pushlog until we have `depth` push ids.
    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        # Fixed: wrap .keys() in list() — on Python 3 `list + dict_keys`
        # raises TypeError (matches the list() usage elsewhere in this file).
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    # Keep only the most recent `depth` pushes.
    pushes = sorted(pushes)[-depth:]
    backfill_pushes = []

    for push in pushes:
        try:
            full_task_graph = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/full-task-graph.json')
            _, full_task_graph = TaskGraph.from_json(full_task_graph)
            label_to_taskid = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/label-to-taskid.json')
            push_params = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/parameters.yml')
            push_decision_task_id = find_decision_task(push_params, graph_config)
        except HTTPError as e:
            # Old pushes may not have published these index artifacts; best-effort skip.
            logger.info(
                'Skipping {} due to missing index artifacts! Error: {}'.format(
                    push, e))
            continue

        if label in full_task_graph.tasks.keys():
            def modifier(task):
                # Only rewrite the target task; leave its dependencies untouched.
                if task.label != label:
                    return task

                if input.get('testPath', ''):
                    is_wpttest = 'web-platform' in task.task['metadata']['name']
                    is_android = 'android' in task.task['metadata']['name']
                    gpu_required = False
                    if (not is_wpttest) and \
                       ('gpu' in task.task['metadata']['name'] or
                        'webgl' in task.task['metadata']['name'] or
                        ('reftest' in task.task['metadata']['name'] and
                         'jsreftest' not in task.task['metadata']['name'])):
                        gpu_required = True

                    # Create new cmd that runs a test-verify type job
                    preamble_length = 3
                    verify_args = ['--e10s', '--verify',
                                   '--total-chunk=1', '--this-chunk=1']
                    if is_android:
                        # no --e10s; todo, what about future geckoView?
                        verify_args.remove('--e10s')
                    if gpu_required:
                        verify_args.append('--gpu-required')
                    if 'testPath' in input:
                        task.task['payload']['env'][
                            'MOZHARNESS_TEST_PATHS'] = json.dumps({
                                task.task['extra']['suite']['flavor']:
                                    [input['testPath']]
                            })
                    cmd_parts = task.task['payload']['command']
                    keep_args = ['--installer-url', '--download-symbols',
                                 '--test-packages-url']
                    cmd_parts = remove_args_from_command(
                        cmd_parts, preamble_length, keep_args)
                    cmd_parts = add_args_to_command(cmd_parts, verify_args)
                    task.task['payload']['command'] = cmd_parts

                    # morph the task label to a test-verify job
                    pc = task.task['metadata']['name'].split('/')
                    config = pc[-1].split('-')
                    subtype = ''
                    symbol = 'TV-bf'
                    if gpu_required:
                        subtype = '-gpu'
                        symbol = 'TVg-bf'
                    if is_wpttest:
                        subtype = '-wpt'
                        symbol = 'TVw-bf'
                    if not is_android:
                        subtype = "%s-e10s" % subtype
                    newlabel = "%s/%s-test-verify%s" % (pc[0], config[0], subtype)
                    task.task['metadata']['name'] = newlabel
                    task.task['tags']['label'] = newlabel
                    task.task['extra']['index']['rank'] = 0
                    task.task['extra']['chunks']['current'] = 1
                    task.task['extra']['chunks']['total'] = 1
                    task.task['extra']['suite']['name'] = 'test-verify'
                    task.task['extra']['suite']['flavor'] = 'test-verify'
                    task.task['extra']['treeherder']['symbol'] = symbol
                    del task.task['extra']['treeherder']['groupSymbol']
                return task

            times = input.get('times', 1)
            for i in xrange(times):
                create_tasks(graph_config, [label], full_task_graph,
                             label_to_taskid, push_params,
                             push_decision_task_id, push, modifier=modifier)
                backfill_pushes.append(push)
        else:
            # Fixed: use the module logger consistently instead of the root
            # logger (`logging.info`).
            logger.info('Could not find {} on {}. Skipping.'.format(
                label, push))
    combine_task_graph_files(backfill_pushes)