Example #1
def trigger_jobs(buildername, revision, back_revisions=30, times=30,
                 dry_run=False):
    buildername = sanitize_buildername(buildername)
    repo_url = query_repo_url_from_buildername(buildername)
    repo_name = query_repo_name_from_buildername(buildername)

    if back_revisions >= 0:
        # find the revision *back_revisions* before the one we got
        push_info = query_revision_info(repo_url, revision)
        end_id = int(push_info["pushid"])  # newest revision
        start_id = end_id - back_revisions
        revlist = query_pushid_range(repo_url=repo_url,
                                     start_id=start_id,
                                     end_id=end_id)
        revision = revlist[-1]

    requests = \
        trigger_job(revision, buildername, times=times, dry_run=dry_run)
    if any(req.status_code != 202 for req in requests):
        LOG.warn('WARNING: not all requests succeeded')

    return ('https://treeherder.mozilla.org/#/jobs?%s' % urllib.urlencode({
        'repo': repo_name,
        'revision': revision,
        'filter-searchStr': buildername
    }))
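A rough usage sketch for trigger_jobs; the builder name and revision below are placeholders, not real values, and the helper is assumed to be imported from the module shown above.

# Hypothetical call: re-trigger the builder on the push 30 pushes back from
# the given revision, five times, without actually scheduling anything.
url = trigger_jobs(
    buildername='Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1',
    revision='4e030c8cf8c3',
    back_revisions=30,
    times=5,
    dry_run=True,
)
# Returns a Treeherder link filtered to the chosen repo, revision and builder.
print(url)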
Example #2
def _query_metadata(repo_name, revision):
    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    return {
        'owner': push_info['user'],
        'source': '%s/rev/%s' % (repo_url, revision),
        'description': 'Task graph generated via Mozilla CI tools',
    }
Example #3
def _query_metadata(repo_name, revision):
    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    return {
        'owner': push_info['user'],
        'source': '%s/rev/%s' % (repo_url, revision),
        'description': 'Task graph generated via Mozilla CI tools',
    }
Example #4
def _query_metadata(repo_name, revision, name, description=None):
    global METADATA

    if not METADATA:
        repo_url = query_repo_url(repo_name)
        push_info = query_revision_info(repo_url, revision)

        if not description:
            description = 'Task graph generated via Mozilla CI tools'

        METADATA = {
            'description': description,
            'owner': push_info['user'],
            'source': '%s/rev/%s' % (repo_url, revision),
        }

    result = {'name': name}
    result.update(METADATA)

    return result
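Unlike the earlier variant, this version memoizes its result in a module-level METADATA global, so only the first call queries the push log; later calls reuse the cached owner/source/description and merely swap in the new name. A minimal behavioural sketch, assuming METADATA starts out falsy (e.g. None) at module level and that the placeholder repo and revision resolve:

METADATA = None  # assumed module-level cache read and written by _query_metadata

first = _query_metadata('mozilla-inbound', '4e030c8cf8c3', name='build-task')
second = _query_metadata('mozilla-inbound', '4e030c8cf8c3', name='test-task',
                         description='ignored, the cache is already populated')

# Only 'name' differs between the two results; everything else comes from the cache.
assert first['owner'] == second['owner']
assert first['name'] != second['name']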
Example #5
def _query_metadata(repo_name, revision, name, description=None):
    global METADATA

    if not METADATA:
        repo_url = query_repo_url(repo_name)
        push_info = query_revision_info(repo_url, revision)

        if not description:
            description = "Task graph generated via Mozilla CI tools"

        METADATA = {
            "description": description,
            "owner": push_info["user"],
            "source": "%s/rev/%s" % (repo_url, revision),
        }

    result = {"name": name}
    result.update(METADATA)

    return result
Example #6
def _query_metadata(repo_name, revision, name, description=None):
    global METADATA

    if not METADATA:
        repo_url = query_repo_url(repo_name)
        push_info = query_revision_info(repo_url, revision)

        if not description:
            description = 'Task graph generated via Mozilla CI tools'

        METADATA = {
            'description': description,
            'owner': push_info['user'],
            'source': '%s/rev/%s' % (repo_url, revision),
        }

    result = {'name': name}
    result.update(METADATA)

    return result
Example #7
def main():
    options = parse_args()
    validate_options(options)

    if options.debug:
        LOG.setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
        LOG.info("Setting DEBUG level")
    else:
        LOG.setLevel(logging.INFO)
        # requests is too noisy and adds no value
        logging.getLogger("requests").setLevel(logging.WARNING)

    repo_url = query_repo_url_from_buildername(options.buildername)

    if options.back_revisions:
        push_info = query_revision_info(repo_url, options.rev)
        end_id = int(push_info["pushid"])  # newest revision
        start_id = end_id - options.back_revisions
        revlist = query_pushid_range(repo_url=repo_url,
                                     start_id=start_id,
                                     end_id=end_id)

    elif options.delta:
        revlist = query_revisions_range_from_revision_and_delta(
            repo_url,
            options.rev,
            options.delta)

    elif options.from_rev:
        revlist = query_revisions_range(
            repo_url,
            to_revision=options.rev,
            from_revision=options.from_rev)
Example #8
def _create_task(buildername,
                 repo_name,
                 revision,
                 task_graph_id=None,
                 parent_task_id=None,
                 requires=None,
                 properties={}):
    """Return takcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list of dependencies on other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param task_graph_id: TC graph id to which this task belongs
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster task definition
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError("The builder '%s' should be for repo: %s." %
                         (buildername, repo_name))

    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    # Needed because of bug 1195751
    all_properties = {
        'product': builder_info['product'],
        'who': push_info['user'],
    }
    all_properties.update(properties)

    # XXX: We should validate that the parent task is a valid parent platform
    #      e.g. do not schedule Windows tests against Linux builds
    task = create_task(repo_name=repo_name,
                       revision=revision,
                       taskGroupId=task_graph_id,
                       workerType='buildbot-bridge',
                       provisionerId='buildbot-bridge',
                       payload={
                           'buildername': buildername,
                           'sourcestamp': {
                               'branch': repo_name,
                               'revision': revision
                           },
                           'properties': all_properties,
                       },
                       metadata_name=buildername)

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties'][
            'parent_task_id'] = parent_task_id

    return task
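A hedged sketch of how _create_task might be called; the builder, repo, revision, task-graph id and extra property values are all placeholders, purely for illustration.

# Hypothetical call: build a buildbot-bridge task definition for a single
# builder/revision pair. All argument values below are placeholders.
task = _create_task(
    buildername='Linux mozilla-inbound build',
    repo_name='mozilla-inbound',
    revision='4e030c8cf8c3',
    task_graph_id='some-task-graph-id',
    properties={'some_extra_property': 'value'},  # merged into product/who
)
# The result is a dict describing one TaskCluster task; 'requires' and the
# parent_task_id property are only set when those optional arguments are given.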
Example #9
def _create_task(buildername, repo_name, revision, task_graph_id=None,
                 parent_task_id=None, requires=None, properties={}):
    """Return takcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list of dependencies on other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param task_graph_id: TC graph id to which this task belongs
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster task definition
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s." % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    # Needed because of bug 1195751
    all_properties = {
        'product': builder_info['product'],
        'who': push_info['user'],
    }
    all_properties.update(properties)

    # XXX: We should validate that the parent task is a valid parent platform
    #      e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': revision
            },
            'properties': all_properties,
        },
        metadata_name=buildername
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
Example #10
def main():
    options = parse_args()
    validate_options(options)

    if options.debug:
        LOG.setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
        LOG.info("Setting DEBUG level")
    else:
        LOG.setLevel(logging.INFO)
        # requests is too noisy and adds no value
        logging.getLogger("requests").setLevel(logging.WARNING)

    options.buildername = sanitize_buildername(options.buildername)
    repo_url = query_repo_url_from_buildername(options.buildername)

    if options.back_revisions:
        push_info = query_revision_info(repo_url, options.rev)
        end_id = int(push_info["pushid"])  # newest revision
        start_id = end_id - options.back_revisions
        revlist = query_pushid_range(repo_url=repo_url,
                                     start_id=start_id,
                                     end_id=end_id)

    elif options.delta:
        revlist = query_revisions_range_from_revision_and_delta(
            repo_url,
            options.rev,
            options.delta)

    elif options.from_rev:
        revlist = query_revisions_range(
            repo_url,
            to_revision=options.rev,
            from_revision=options.from_rev)

    elif options.backfill:
        push_info = query_revision_info(repo_url, options.rev)
        # A known bad revision
        end_id = int(push_info["pushid"])  # newest revision
        # The furthest we will go to find the last good job
        # We might find a good job before that
        start_id = end_id - options.max_revisions + 1
        revlist = query_pushid_range(repo_url=repo_url,
                                     start_id=start_id,
                                     end_id=end_id)

        revlist = backfill_revlist(
            options.buildername,
            revlist,
            options.times,
            options.dry_run
        )

    else:
        revlist = [options.rev]

    if options.skips:
        revlist = revlist[::options.skips]

    try:
        trigger_range(
            buildername=options.buildername,
            revisions=revlist,
            times=options.times,
            dry_run=options.dry_run,
            files=options.files
        )
    except Exception as e:
        LOG.exception(e)
        exit(1)