Example #1
def get_action_yml(parameters):
    templates = Templates(os.path.join(GECKO, "taskcluster/taskgraph"))
    action_parameters = parameters.copy()
    action_parameters.update({
        "decision_task_id": "{{decision_task_id}}",
        "task_labels": "{{task_labels}}",
        "from_now": json_time_from_now,
        "now": current_json_time()
    })
    return templates.load('action.yml', action_parameters)
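
For context: Templates loads a YAML file from a root directory and expands it with the supplied parameters (the unit tests near the end of this page exercise variable substitution and template inheritance). Below is a minimal sketch of just the substitution step, assuming PyYAML and ignoring inheritance; it is an illustration, not the real taskcluster implementation:

import os
import yaml  # assumes PyYAML is available

class SimpleTemplates(object):
    """Illustrative stand-in for Templates: load a YAML file from a fixed
    root and replace {{name}} placeholders with string parameters. The real
    class also supports template inheritance, omitted here."""

    def __init__(self, root):
        if not os.path.isdir(root):
            raise Exception('must be a directory: ' + root)
        self.root = root

    def load(self, name, parameters):
        with open(os.path.join(self.root, name)) as f:
            text = f.read()
        for key, value in parameters.items():
            if isinstance(value, str):
                text = text.replace('{{%s}}' % key, value)
        return yaml.safe_load(text)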
Example #2
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        parameters = {
            'pushlog_id': params.get('pushlog_id', 0),
            'pushdate': params['moz_build_date'],
            'pushtime': params['moz_build_date'][8:],
            'year': params['moz_build_date'][0:4],
            'month': params['moz_build_date'][4:6],
            'day': params['moz_build_date'][6:8],
            'project': params['project'],
            'docker_image': docker_image,
            'base_repository': params['base_repository'] or params['head_repository'],
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'owner': params['owner'],
            'level': params['level'],
            'source': '{repo}file/{rev}/taskcluster/ci/docker-image/image.yml'
                      .format(repo=params['head_repository'], rev=params['head_rev']),
            'index_image_prefix': INDEX_PREFIX,
            'artifact_path': 'public/image.tar.zst',
        }

        tasks = []
        templates = Templates(path)
        for image_name, image_symbol in config['images'].iteritems():
            context_path = os.path.join('taskcluster', 'docker', image_name)
            context_hash = generate_context_hash(GECKO, context_path, image_name)

            image_parameters = dict(parameters)
            image_parameters['image_name'] = image_name
            image_parameters['context_hash'] = context_hash

            image_task = templates.load('image.yml', image_parameters)
            attributes = {'image_name': image_name}

            # Give each docker image its own Treeherder symbol.
            if 'extra' in image_task['task']:
                image_task['task']['extra']['treeherder']['symbol'] = image_symbol

            # As an optimization, if the context hash exists for a higher level, that
            # image task ID will be used.  The reasoning is that everything eventually
            # ends up on level 3; if most tasks use this as a common image for a given
            # context hash, a worker within Taskcluster does not need to keep a copy
            # of the same image per branch.
            index_paths = ['{}.level-{}.{}.hash.{}'.format(
                                INDEX_PREFIX, level, image_name, context_hash)
                           for level in range(int(params['level']), 4)]

            tasks.append(cls(kind, 'build-docker-image-' + image_name,
                             task=image_task['task'], attributes=attributes,
                             index_paths=index_paths))

        return tasks
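
The index_paths comprehension above produces one index route per level, from the push's own level up to level 3. With illustrative values (INDEX_PREFIX comes from the surrounding module; the value below is only an assumption for the demo):

INDEX_PREFIX = 'docker.images.v2'  # assumed value, for illustration only
level, image_name, context_hash = 1, 'desktop-build', 'abc123'
index_paths = ['{}.level-{}.{}.hash.{}'.format(INDEX_PREFIX, l, image_name, context_hash)
               for l in range(int(level), 4)]
# ['docker.images.v2.level-1.desktop-build.hash.abc123',
#  'docker.images.v2.level-2.desktop-build.hash.abc123',
#  'docker.images.v2.level-3.desktop-build.hash.abc123']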
Example #3
def get_action_yml(parameters):
    templates = Templates(os.path.join(GECKO, "taskcluster/taskgraph"))
    action_parameters = parameters.copy()
    action_parameters.update(
        {
            "action": "{{action}}",
            "action_args": "{{action_args}}",
            "project": parameters["project"],
            "from_now": json_time_from_now,
            "now": current_json_time(),
        }
    )
    return templates.load("action.yml", action_parameters)
Example #4
def get_action_yml(parameters):
    templates = Templates(os.path.join(GECKO, "taskcluster/taskgraph"))
    action_parameters = parameters.copy()

    match = re.match(r'https://(hg.mozilla.org)/(.*?)/?$',
                     action_parameters['head_repository'])
    if not match:
        raise Exception('Unrecognized head_repository')
    repo_scope = 'assume:repo:{}/{}:*'.format(match.group(1), match.group(2))

    action_parameters.update({
        "action": "{{action}}",
        "action_args": "{{action_args}}",
        "repo_scope": repo_scope,
        "from_now": json_time_from_now,
        "now": current_json_time()
    })
    return templates.load('action.yml', action_parameters)
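
The repo_scope computation is easy to check in isolation: the regex accepts an optional trailing slash, and the resulting scope has the shape below (illustrative URL):

import re

match = re.match(r'https://(hg.mozilla.org)/(.*?)/?$',
                 'https://hg.mozilla.org/mozilla-central/')
assert match.group(2) == 'mozilla-central'
print('assume:repo:{}/{}:*'.format(match.group(1), match.group(2)))
# assume:repo:hg.mozilla.org/mozilla-central:*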
Example #5
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        root = os.path.abspath(path)

        tasks = []
        for filename in config.get('jobs-from', []):
            templates = Templates(root)
            jobs = templates.load(filename, {})

            for name, job in jobs.iteritems():
                for artifact in job['unsigned-task']['artifacts']:
                    url = ARTIFACT_URL.format(
                        '<{}>'.format('unsigned-artifact'), artifact)
                    job['task']['payload']['unsignedArtifacts'].append(
                        {'task-reference': url})
                attributes = job.setdefault('attributes', {})
                attributes.update({'kind': 'signing'})
                tasks.append(cls(kind, name, job, attributes=attributes))

        return tasks
Example #6
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        root = os.path.abspath(os.path.join(path, config['signing_path']))

        # Find each nightly-fennec task and create a signing task for it.
        fennec_tasks = [t for t in loaded_tasks if t.attributes.get('kind') == 'nightly-fennec']

        tasks = []
        for fennec_task in fennec_tasks:
            templates = Templates(root)
            task = templates.load('signing.yml', {})

            artifacts = ['public/build/target.apk',
                         'public/build/en-US/target.apk']
            for artifact in artifacts:
                url = ARTIFACT_URL.format('<build-nightly-fennec>', artifact)
                task['task']['payload']['unsignedArtifacts'].append({
                    'task-reference': url
                })

            attributes = {'kind': 'signing'}
            tasks.append(cls(kind, 'signing-nightly-fennec', task=task['task'],
                             attributes=attributes))

        return tasks
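
ARTIFACT_URL is defined elsewhere in this tree; in this era it was a two-slot format string roughly like the assumed value below. The {'task-reference': ...} wrapper marks the URL so taskgraph can later replace the <build-nightly-fennec> label with the real taskId:

# Assumed shape of ARTIFACT_URL, for illustration only.
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'

url = ARTIFACT_URL.format('<build-nightly-fennec>', 'public/build/target.apk')
print({'task-reference': url})
# {'task-reference': 'https://queue.taskcluster.net/v1/task/<build-nightly-fennec>/artifacts/public/build/target.apk'}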
Example #7
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        root = os.path.abspath(os.path.join(path, config['legacy_path']))

        project = params['project']
        # NOTE: message is ignored here; we always use DEFAULT_TRY, then filter the
        # resulting task graph later
        message = DEFAULT_TRY

        templates = Templates(root)

        job_path = os.path.join(root, 'tasks', 'branches', project,
                                'job_flags.yml')
        job_path = job_path if os.path.exists(job_path) else \
            os.path.join(root, DEFAULT_JOB_PATH)

        jobs = templates.load(job_path, {})

        job_graph, trigger_tests = parse_commit(message, jobs)

        cmdline_interactive = params.get('interactive', False)

        # Default to current time if querying the head rev fails
        vcs_info = query_vcs_info(params['head_repository'],
                                  params['head_rev'])
        changed_files = set()
        if vcs_info:

            logger.debug('{} commits influencing task scheduling:'.format(
                len(vcs_info.changesets)))
            for c in vcs_info.changesets:
                logger.debug("{cset} {desc}".format(
                    cset=c['node'][0:12],
                    desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
                changed_files |= set(c['files'])

        pushdate = time.strftime('%Y%m%d%H%M%S',
                                 time.gmtime(params['pushdate']))

        # Template parameters used when expanding the graph
        parameters = dict(gaia_info().items() + {
            'index': 'index',
            'project': project,
            'pushlog_id': params.get('pushlog_id', 0),
            'docker_image': docker_image,
            'base_repository': params['base_repository'] or params['head_repository'],
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'pushdate': pushdate,
            'pushtime': pushdate[8:],
            'year': pushdate[0:4],
            'month': pushdate[4:6],
            'day': pushdate[6:8],
            'rank': params['pushdate'],
            'owner': params['owner'],
            'level': params['level'],
        }.items())

        routes_file = os.path.join(root, 'routes.json')
        with open(routes_file) as f:
            contents = json.load(f)
            json_routes = contents['routes']
            # TODO: Nightly and/or l10n routes

        # Task graph we are generating for taskcluster...
        graph = {
            'tasks': [],
            'scopes': set(),
        }

        for env in TREEHERDER_ROUTES:
            route = format_treeherder_route(TREEHERDER_ROUTES[env],
                                            parameters['project'],
                                            parameters['head_rev'],
                                            parameters['pushlog_id'])
            graph['scopes'].add("queue:route:{}".format(route))

        graph['metadata'] = {
            'source': '{repo}file/{rev}/testing/taskcluster/mach_commands.py'.format(
                repo=params['head_repository'], rev=params['head_rev']),
            'owner': params['owner'],
            # TODO: Add full mach commands to this example?
            'description': 'Task graph generated via ./mach taskcluster-graph',
            'name': 'task graph local'
        }

        # Filter the job graph according to conditions met by this invocation.
        def should_run(task):
            # Old style build or test task that doesn't define conditions. Always runs.
            if 'when' not in task:
                return True

            when = task['when']

            # If the task defines file patterns and we have a set of changed
            # files to compare against, only run if a file pattern matches one
            # of the changed files.
            file_patterns = when.get('file_patterns', None)
            if file_patterns and changed_files:
                # Always consider changes to the task definition itself
                file_patterns.append(
                    'testing/taskcluster/{task}'.format(task=task['task']))
                for pattern in file_patterns:
                    for path in changed_files:
                        if mozpackmatch(path, pattern):
                            logger.debug(
                                'scheduling {task} because pattern {pattern} '
                                'matches {path}'.format(
                                    task=task['task'],
                                    pattern=pattern,
                                    path=path,
                                ))
                            return True

                # No file patterns matched. Discard task.
                logger.debug(
                    'discarding {task} because no relevant files changed'.format(
                        task=task['task'], pattern=pattern, path=path))
                return False

            return True

        job_graph = filter(should_run, job_graph)

        all_routes = {}

        for build in job_graph:
            logging.debug("loading build task {}".format(build['task']))
            interactive = cmdline_interactive or build["interactive"]
            build_parameters = merge_dicts(parameters,
                                           build['additional-parameters'])
            build_parameters['build_slugid'] = mklabel()
            build_parameters['source'] = '{repo}file/{rev}/testing/taskcluster/{file}'.format(
                repo=params['head_repository'], rev=params['head_rev'], file=build['task'])
            build_task = templates.load(build['task'], build_parameters)

            # Copy build_* attributes to expose them to post-build tasks
            # as well as json routes and tests
            task_extra = build_task['task']['extra']
            build_parameters['build_name'] = task_extra['build_name']
            build_parameters['build_type'] = task_extra['build_type']
            build_parameters['build_product'] = task_extra['build_product']

            if 'treeherder' in task_extra:
                tier = task_extra['treeherder'].get('tier', 1)
                if tier != 1:
                    # Only tier 1 jobs use the build time as rank. Everything
                    # else gets rank 0 until it is promoted to tier 1.
                    task_extra['index']['rank'] = 0

            set_interactive_task(build_task, interactive)

            # try builds don't use cache nor coalescing
            if project == "try":
                remove_caches_from_task(build_task)
                remove_coalescing_from_task(build_task)
                set_expiration(build_task, TRY_EXPIRATION)

            decorate_task_treeherder_routes(build_task['task'],
                                            build_parameters['project'],
                                            build_parameters['head_rev'],
                                            build_parameters['pushlog_id'])
            decorate_task_json_routes(build_task['task'], json_routes,
                                      build_parameters)

            # Ensure each build graph is valid after construction.
            validate_build_task(build_task)
            attributes = build_task['attributes'] = {
                'kind': 'legacy',
                'legacy_kind': 'build',
                'run_on_projects': ['all'],
            }
            if 'build_name' in build:
                attributes['build_platform'] = build['build_name']
            if 'build_type' in task_extra:
                attributes['build_type'] = {
                    'dbg': 'debug'
                }.get(task_extra['build_type'], task_extra['build_type'])
            if build.get('is_job'):
                attributes['job'] = build['build_name']
                attributes['legacy_kind'] = 'job'
            graph['tasks'].append(build_task)

            for location in build_task['task']['extra'].get('locations', {}):
                build_parameters['{}_location'.format(location)] = \
                    build_task['task']['extra']['locations'][location]

            for url in build_task['task']['extra'].get('url', {}):
                build_parameters['{}_url'.format(url)] = \
                    build_task['task']['extra']['url'][url]

            define_task = DEFINE_TASK.format(build_task['task']['workerType'])

            for route in build_task['task'].get('routes', []):
                if route.startswith('index.gecko.v2') and route in all_routes:
                    raise Exception(
                        "Error: route '%s' is in use by multiple tasks: '%s' and '%s'"
                        % (
                            route,
                            build_task['task']['metadata']['name'],
                            all_routes[route],
                        ))
                all_routes[route] = build_task['task']['metadata']['name']

            graph['scopes'].add(define_task)
            graph['scopes'] |= set(build_task['task'].get('scopes', []))
            route_scopes = map(lambda route: 'queue:route:' + route,
                               build_task['task'].get('routes', []))
            graph['scopes'] |= set(route_scopes)

            # Treeherder symbol configuration for the graph required for each
            # build so tests know which platform they belong to.
            build_treeherder_config = build_task['task']['extra']['treeherder']

            if 'machine' not in build_treeherder_config:
                message = '({}), extra.treeherder.machine required for all builds'
                raise ValueError(message.format(build['task']))

            if 'build' not in build_treeherder_config:
                build_treeherder_config['build'] = \
                    build_treeherder_config['machine']

            if 'collection' not in build_treeherder_config:
                build_treeherder_config['collection'] = {'opt': True}

            if len(build_treeherder_config['collection'].keys()) != 1:
                message = '({}), extra.treeherder.collection must contain one type'
                raise ValueError(message.format(build['task']))

            for post_build in build['post-build']:
                # copy over the old parameters to update the template
                # TODO additional-parameters is currently not an option, only
                # enabled for build tasks
                post_parameters = merge_dicts(
                    build_parameters,
                    post_build.get('additional-parameters', {}))
                post_task = configure_dependent_task(post_build['task'],
                                                     post_parameters,
                                                     mklabel(), templates,
                                                     build_treeherder_config)
                set_interactive_task(post_task, interactive)

                if project == "try":
                    set_expiration(post_task, TRY_EXPIRATION)

                post_task['attributes'] = attributes.copy()
                post_task['attributes']['legacy_kind'] = 'post_build'
                post_task['attributes']['post_build'] = post_build['job_flag']
                graph['tasks'].append(post_task)

        graph['scopes'] = sorted(graph['scopes'])

        # Convert to a dictionary of tasks.  The process above has invented a
        # taskId for each task, and we use those as the *labels* for the tasks;
        # taskgraph will later assign them new taskIds.
        return [
            cls(kind,
                t['taskId'],
                task=t['task'],
                attributes=t['attributes'],
                task_dict=t) for t in graph['tasks']
        ]
Example #8
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        # TODO: make this match the pushdate (get it from a parameter rather than vcs)
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())

        parameters = {
            'pushlog_id': params.get('pushlog_id', 0),
            'pushdate': pushdate,
            'pushtime': pushdate[8:],
            'year': pushdate[0:4],
            'month': pushdate[4:6],
            'day': pushdate[6:8],
            'project': params['project'],
            'docker_image': docker_image,
            'base_repository': params['base_repository'] or params['head_repository'],
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'owner': params['owner'],
            'level': params['level'],
            'source': '{repo}file/{rev}/taskcluster/ci/docker-image/image.yml'
                      .format(repo=params['head_repository'], rev=params['head_rev']),
        }

        tasks = []
        templates = Templates(path)
        for image_name in config['images']:
            context_path = os.path.join('testing', 'docker', image_name)
            context_hash = generate_context_hash(context_path)

            image_parameters = dict(parameters)
            image_parameters['context_hash'] = context_hash
            image_parameters['context_path'] = context_path
            image_parameters['artifact_path'] = 'public/image.tar'
            image_parameters['image_name'] = image_name

            image_artifact_path = \
                "public/decision_task/image_contexts/{}/context.tar.gz".format(image_name)
            if os.environ.get('TASK_ID'):
                destination = os.path.join(
                    os.environ['HOME'],
                    "artifacts/decision_task/image_contexts/{}/context.tar.gz".format(image_name))
                image_parameters['context_url'] = ARTIFACT_URL.format(
                    os.environ['TASK_ID'], image_artifact_path)
                cls.create_context_tar(context_path, destination, image_name)
            else:
                # skip context generation since this isn't a decision task
                # TODO: generate context tarballs using subdirectory clones in
                # the image-building task so we don't have to worry about this.
                image_parameters['context_url'] = 'file:///tmp/' + image_artifact_path

            image_task = templates.load('image.yml', image_parameters)

            attributes = {'image_name': image_name}

            # As an optimization, if the context hash exists for mozilla-central, that
            # image task ID will be used.  The reasoning is that everything eventually
            # ends up on mozilla-central; if most tasks use this as a common image for
            # a given context hash, a worker within Taskcluster does not need to keep
            # a copy of the same image per branch.
            index_paths = ['docker.images.v1.{}.{}.hash.{}'.format(
                                project, image_name, context_hash)
                           for project in ['mozilla-central', params['project']]]

            tasks.append(cls(kind, 'build-docker-image-' + image_name,
                             task=image_task['task'], attributes=attributes,
                             index_paths=index_paths))

        return tasks
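
The repeated pushdate slices decode a %Y%m%d%H%M%S timestamp positionally; a quick demonstration:

import time

pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(0))
print(pushdate)       # '19700101000000'
print(pushdate[0:4])  # year:  '1970'
print(pushdate[4:6])  # month: '01'
print(pushdate[6:8])  # day:   '01'
print(pushdate[8:])   # pushtime (HHMMSS): '000000'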
Example #9
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        root = os.path.abspath(os.path.join(path, config['legacy_path']))

        project = params['project']
        # NOTE: message is ignored here; we always use DEFAULT_TRY, then filter the
        # resulting task graph later
        message = DEFAULT_TRY

        templates = Templates(root)

        job_path = os.path.join(root, 'tasks', 'branches', project, 'job_flags.yml')
        job_path = job_path if os.path.exists(job_path) else \
            os.path.join(root, DEFAULT_JOB_PATH)

        jobs = templates.load(job_path, {})

        job_graph, trigger_tests = parse_commit(message, jobs)

        cmdline_interactive = params.get('interactive', False)

        # Default to current time if querying the head rev fails
        push_epoch = int(time.time())
        vcs_info = query_vcs_info(params['head_repository'], params['head_rev'])
        changed_files = set()
        if vcs_info:
            push_epoch = vcs_info.pushdate

            logger.debug(
                '{} commits influencing task scheduling:'.format(len(vcs_info.changesets)))
            for c in vcs_info.changesets:
                logger.debug("{cset} {desc}".format(
                    cset=c['node'][0:12],
                    desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
                changed_files |= set(c['files'])

        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(push_epoch))

        # Template parameters used when expanding the graph
        parameters = dict(gaia_info().items() + {
            'index': 'index',
            'project': project,
            'pushlog_id': params.get('pushlog_id', 0),
            'docker_image': docker_image,
            'base_repository': params['base_repository'] or params['head_repository'],
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'pushdate': pushdate,
            'pushtime': pushdate[8:],
            'year': pushdate[0:4],
            'month': pushdate[4:6],
            'day': pushdate[6:8],
            'rank': push_epoch,
            'owner': params['owner'],
            'level': params['level'],
        }.items())

        routes_file = os.path.join(root, 'routes.json')
        with open(routes_file) as f:
            contents = json.load(f)
            json_routes = contents['routes']
            # TODO: Nightly and/or l10n routes

        # Task graph we are generating for taskcluster...
        graph = {
            'tasks': [],
            'scopes': set(),
        }

        for env in TREEHERDER_ROUTES:
            route = format_treeherder_route(TREEHERDER_ROUTES[env],
                                            parameters['project'],
                                            parameters['head_rev'],
                                            parameters['pushlog_id'])
            graph['scopes'].add("queue:route:{}".format(route))

        graph['metadata'] = {
            'source': '{repo}file/{rev}/testing/taskcluster/mach_commands.py'.format(
                repo=params['head_repository'], rev=params['head_rev']),
            'owner': params['owner'],
            # TODO: Add full mach commands to this example?
            'description': 'Task graph generated via ./mach taskcluster-graph',
            'name': 'task graph local'
        }

        # Filter the job graph according to conditions met by this invocation.
        def should_run(task):
            # Old style build or test task that doesn't define conditions. Always runs.
            if 'when' not in task:
                return True

            when = task['when']

            # If the task defines file patterns and we have a set of changed
            # files to compare against, only run if a file pattern matches one
            # of the changed files.
            file_patterns = when.get('file_patterns', None)
            if file_patterns and changed_files:
                # Always consider changes to the task definition itself
                file_patterns.append('testing/taskcluster/{task}'.format(task=task['task']))
                for pattern in file_patterns:
                    for path in changed_files:
                        if mozpackmatch(path, pattern):
                            logger.debug('scheduling {task} because pattern {pattern} '
                                         'matches {path}'.format(
                                             task=task['task'],
                                             pattern=pattern,
                                             path=path,
                                         ))
                            return True

                # No file patterns matched. Discard task.
                logger.debug('discarding {task} because no relevant files changed'.format(
                    task=task['task'],
                    pattern=pattern,
                    path=path))
                return False

            return True

        job_graph = filter(should_run, job_graph)

        all_routes = {}

        for build in job_graph:
            logging.debug("loading build task {}".format(build['task']))
            interactive = cmdline_interactive or build["interactive"]
            build_parameters = merge_dicts(parameters, build['additional-parameters'])
            build_parameters['build_slugid'] = mklabel()
            build_parameters['source'] = '{repo}file/{rev}/testing/taskcluster/{file}'.format(
                repo=params['head_repository'], rev=params['head_rev'], file=build['task'])
            build_task = templates.load(build['task'], build_parameters)

            # Copy build_* attributes to expose them to post-build tasks
            # as well as json routes and tests
            task_extra = build_task['task']['extra']
            build_parameters['build_name'] = task_extra['build_name']
            build_parameters['build_type'] = task_extra['build_type']
            build_parameters['build_product'] = task_extra['build_product']

            if 'treeherder' in task_extra:
                tier = task_extra['treeherder'].get('tier', 1)
                if tier != 1:
                    # Only tier 1 jobs use the build time as rank. Everything
                    # else gets rank 0 until it is promoted to tier 1.
                    task_extra['index']['rank'] = 0

            set_interactive_task(build_task, interactive)

            # try builds don't use cache nor coalescing
            if project == "try":
                remove_caches_from_task(build_task)
                remove_coalescing_from_task(build_task)
                set_expiration(build_task, TRY_EXPIRATION)

            decorate_task_treeherder_routes(build_task['task'],
                                            build_parameters['project'],
                                            build_parameters['head_rev'],
                                            build_parameters['pushlog_id'])
            decorate_task_json_routes(build_task['task'],
                                      json_routes,
                                      build_parameters)

            # Ensure each build graph is valid after construction.
            validate_build_task(build_task)
            attributes = build_task['attributes'] = {'kind': 'legacy', 'legacy_kind': 'build'}
            if 'build_name' in build:
                attributes['build_platform'] = build['build_name']
            if 'build_type' in task_extra:
                attributes['build_type'] = {'dbg': 'debug'}.get(task_extra['build_type'],
                                                                task_extra['build_type'])
            if build.get('is_job'):
                attributes['job'] = build['build_name']
                attributes['legacy_kind'] = 'job'
            graph['tasks'].append(build_task)

            for location in build_task['task']['extra'].get('locations', {}):
                build_parameters['{}_location'.format(location)] = \
                    build_task['task']['extra']['locations'][location]

            for url in build_task['task']['extra'].get('url', {}):
                build_parameters['{}_url'.format(url)] = \
                    build_task['task']['extra']['url'][url]

            define_task = DEFINE_TASK.format(build_task['task']['workerType'])

            for route in build_task['task'].get('routes', []):
                if route.startswith('index.gecko.v2') and route in all_routes:
                    raise Exception(
                        "Error: route '%s' is in use by multiple tasks: '%s' and '%s'" % (
                            route,
                            build_task['task']['metadata']['name'],
                            all_routes[route],
                        ))
                all_routes[route] = build_task['task']['metadata']['name']

            graph['scopes'].add(define_task)
            graph['scopes'] |= set(build_task['task'].get('scopes', []))
            route_scopes = map(
                lambda route: 'queue:route:' + route, build_task['task'].get('routes', [])
            )
            graph['scopes'] |= set(route_scopes)

            # Treeherder symbol configuration for the graph required for each
            # build so tests know which platform they belong to.
            build_treeherder_config = build_task['task']['extra']['treeherder']

            if 'machine' not in build_treeherder_config:
                message = '({}), extra.treeherder.machine required for all builds'
                raise ValueError(message.format(build['task']))

            if 'build' not in build_treeherder_config:
                build_treeherder_config['build'] = \
                    build_treeherder_config['machine']

            if 'collection' not in build_treeherder_config:
                build_treeherder_config['collection'] = {'opt': True}

            if len(build_treeherder_config['collection'].keys()) != 1:
                message = '({}), extra.treeherder.collection must contain one type'
                raise ValueError(message.format(build['task']))

            for post_build in build['post-build']:
                # copy over the old parameters to update the template
                # TODO additional-parameters is currently not an option, only
                # enabled for build tasks
                post_parameters = merge_dicts(build_parameters,
                                              post_build.get('additional-parameters', {}))
                post_task = configure_dependent_task(post_build['task'],
                                                     post_parameters,
                                                     mklabel(),
                                                     templates,
                                                     build_treeherder_config)
                set_interactive_task(post_task, interactive)

                if project == "try":
                    set_expiration(post_task, TRY_EXPIRATION)

                post_task['attributes'] = attributes.copy()
                post_task['attributes']['legacy_kind'] = 'post_build'
                post_task['attributes']['post_build'] = post_build['job_flag']
                graph['tasks'].append(post_task)

        graph['scopes'] = sorted(graph['scopes'])

        # Convert to a dictionary of tasks.  The process above has invented a
        # taskId for each task, and we use those as the *labels* for the tasks;
        # taskgraph will later assign them new taskIds.
        return [
            cls(kind, t['taskId'], task=t['task'], attributes=t['attributes'], task_dict=t)
            for t in graph['tasks']
        ]
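
The heart of this loader is the should_run filter: a task without a 'when' clause always runs, and a task with file_patterns runs only if some pattern matches a changed file. A condensed, self-contained restatement of that logic, using fnmatch as a stand-in for mozpackmatch (which has its own pattern syntax) and omitting the extra pattern for the task definition itself:

from fnmatch import fnmatch  # stand-in for mozpackmatch, for illustration

def should_run(task, changed_files):
    file_patterns = task.get('when', {}).get('file_patterns')
    if file_patterns and changed_files:
        return any(fnmatch(path, pattern)
                   for pattern in file_patterns
                   for path in changed_files)
    return True

print(should_run({'task': 'build.yml'}, {'dom/foo.cpp'}))                   # True
print(should_run({'when': {'file_patterns': ['dom/*']}}, {'dom/foo.cpp'}))  # True
print(should_run({'when': {'file_patterns': ['gfx/*']}}, {'dom/foo.cpp'}))  # False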
Example #10
class TemplatesTest(unittest.TestCase):

    def setUp(self):
        self.mocked_open = mozunit.MockedOpen(files)
        self.mocked_open.__enter__()
        self.subject = Templates('/fixtures')

    def tearDown(self):
        self.mocked_open.__exit__(None, None, None)

    def test_invalid_path(self):
        with self.assertRaisesRegexp(TemplatesException, 'must be a directory'):
            Templates('/zomg/not/a/dir')

    def test_no_templates(self):
        content = self.subject.load('simple.yml', {})
        self.assertEquals(content, {
            'is_simple': True
        })

    def test_with_templates(self):
        content = self.subject.load('templates.yml', {
            'woot': 'bar'
        })

        self.assertEquals(content, {
            'content': 'content',
            'variable': 'bar'
        })

    def test_inheritance(self):
        '''
        The simple single pass inheritance case.
        '''
        content = self.subject.load('inherit.yml', {})
        self.assertEqual(content, {
            'content': 'content',
            'variable': 'inherit'
        })

    def test_inheritance_implicit_pass(self):
        '''
        Implicitly pass parameters from the child to the ancestor.
        '''
        content = self.subject.load('inherit_pass.yml', {
            'a': 'overridden'
        })

        self.assertEqual(content, {'values': ['overridden', 'b', 'c']})

    def test_inheritance_circular(self):
        '''
        Circular reference handling.
        '''
        with self.assertRaisesRegexp(TemplatesException, 'circular'):
            self.subject.load('circular.yml', {})

    def test_deep_inheritance(self):
        content = self.subject.load('deep/4.yml', {
            'value': 'myvalue'
        })
        self.assertEqual(content, {'variable': 'myvalue'})

    def test_inheritance_with_simple_extensions(self):
        content = self.subject.load('extend_parent.yml', {})
        self.assertEquals(content, {
            'list': ['1', '2', '3', '4'],
            'obj': {
                'from_parent': True,
                'deeper': {
                    'woot': 'bar',
                    'list': ['baz', 'bar']
                },
                'level': 2,
            },
            'was_list': {'replaced': True}
        })
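
The files dictionary handed to mozunit.MockedOpen (the fixture contents) is not part of this listing. Judging only from the assertions, minimal fixtures for the two simplest tests could plausibly look like this (assumed, not the actual fixtures):

files = {
    '/fixtures/simple.yml': 'is_simple: true',
    '/fixtures/templates.yml': "content: 'content'\nvariable: '{{woot}}'",
}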
Example #11
    def test_invalid_path(self):
        with self.assertRaisesRegexp(TemplatesException,
                                     'must be a directory'):
            Templates('/zomg/not/a/dir')
Example #12
    def setUp(self):
        self.mocked_open = mozunit.MockedOpen(files)
        self.mocked_open.__enter__()
        self.subject = Templates('/fixtures')
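
setUp drives mozunit.MockedOpen through the context-manager protocol by hand so the fake files stay mapped for the whole test, and tearDown exits it. A rough standard-library analogue of the same idea (illustrative only, not what mozunit does internally):

from unittest import mock

fake = mock.mock_open(read_data='is_simple: true')
with mock.patch('builtins.open', fake):  # '__builtin__.open' on Python 2
    with open('/fixtures/simple.yml') as f:
        print(f.read())  # is_simple: true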
Example #13
    def load_tasks(cls, kind, path, config, params, loaded_tasks):
        root = os.path.abspath(os.path.join(path, config['nightly_fennec_path']))

        project = params['project']

        # Set up the parameters, including the time of the build
        push_epoch = int(time.time())
        vcs_info = query_vcs_info(params['head_repository'], params['head_rev'])
        changed_files = set()
        if vcs_info:
            push_epoch = vcs_info.pushdate

            logger.debug(
                '{} commits influencing task scheduling:'.format(len(vcs_info.changesets)))
            for c in vcs_info.changesets:
                logger.debug("{cset} {desc}".format(
                    cset=c['node'][0:12],
                    desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
                changed_files |= set(c['files'])

        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(push_epoch))

        # Template parameters used when expanding the graph
        parameters = dict(gaia_info().items() + {
            'index': 'index',
            'project': project,
            'pushlog_id': params.get('pushlog_id', 0),
            'base_repository': params['base_repository'] or params['head_repository'],
            'docker_image': docker_image,
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'pushdate': pushdate,
            'pushtime': pushdate[8:],
            'year': pushdate[0:4],
            'month': pushdate[4:6],
            'day': pushdate[6:8],
            'rank': push_epoch,
            'owner': params['owner'],
            'level': params['level'],
            'build_slugid': mklabel(),
            'source': '{repo}file/{rev}/taskcluster/ci/nightly-fennec'.format(
                repo=params['head_repository'], rev=params['head_rev']),
            'build_name': 'android',
            'build_type': 'opt',
            'build_product': 'mobile'
        }.items())

        routes_file = os.path.join(root, 'routes.json')
        with open(routes_file) as f:
            contents = json.load(f)
            json_routes = contents['routes']

        tasks = []
        templates = Templates(root)

        task = templates.load('android-api-15-nightly-build.yml', parameters)
        decorate_task_json_routes(task['task'], json_routes, parameters)

        attributes = {'kind': 'nightly-fennec'}
        tasks.append(cls(kind, 'build-nightly-fennec',
                         task=task['task'], attributes=attributes,
                         task_dict=task))

        # Convert to a dictionary of tasks.  The process above has invented a
        # taskId for each task, and we use those as the *labels* for the tasks;
        # taskgraph will later assign them new taskIds.
        return tasks