def query_revisions_range(repo_name, from_revision, to_revision):
    """Return a list of revisions for that range."""
    return query_pushes_by_revision_range(
        repo_url=repositories.query_repo_url(repo_name),
        from_revision=from_revision,
        to_revision=to_revision,
        return_revision_list=True)
def generate_metadata(repo_name, revision, name,
                      description='Task graph generated via Mozilla CI tools'):
    """Generate metadata based on input.

    :param repo_name: e.g. alder, mozilla-central
    :type repo_name: str
    :param revision: 12-chars representing a push
    :type revision: str
    :param name: Human readable name of task-graph; gives people finding this
                 an idea of what this graph is about.
    :type name: str
    :param description: Human readable description of task-graph; explain what it does!
    :type description: str
    """
    LOG.debug("Determining metadata.")
    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)

    return {
        'name': name,
        'description': description,
        'owner': push_info.user,
        'source': '%s/rev/%s' % (repo_url, revision),
    }
def query_revisions_range(repo_name, from_revision, to_revision):
    """Return a list of revisions for that range."""
    return pushlog.query_revisions_range(
        repositories.query_repo_url(repo_name),
        from_revision,
        to_revision,
    )
def main(): options = parse_args() repo_url = query_repo_url(options.repo) if not valid_credentials(): sys.exit(-1) if options.debug: LOG = setup_logging(logging.DEBUG) else: LOG = setup_logging(logging.INFO) if options.rev == 'tip': revision = query_repo_tip(repo_url) LOG.info("The tip of %s is %s", options.repo, revision) else: revision = query_full_revision_info(repo_url, options.rev) filters_in = options.includes.split(',') + [options.repo] filters_out = [] if options.exclude: filters_out = options.exclude.split(',') buildernames = filter_buildernames( buildernames=query_builders(repo_name=options.repo), include=filters_in, exclude=filters_out ) if len(buildernames) == 0: LOG.info("0 jobs match these filters, please try again.") return cont = raw_input("%i jobs will be triggered, do you wish to continue? y/n/d (d=show details) " % len(buildernames)) if cont.lower() == 'd': LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(buildernames)) cont = raw_input("Do you wish to continue? y/n ") if cont.lower() != 'y': exit(1) # Setting the QUERY_SOURCE global variable in mozci.py set_query_source(options.query_source) for buildername in buildernames: trigger_range( buildername=buildername, revisions=[revision], times=options.times, dry_run=options.dry_run, ) LOG.info('https://treeherder.mozilla.org/#/jobs?%s' % urllib.urlencode({'repo': query_repo_name_from_buildername(buildername), 'fromchange': revision, 'tochange': revision, 'filter-searchStr': buildername}))
def _query_metadata(repo_name, revision, name, description=None):
    global METADATA

    if not METADATA:
        repo_url = query_repo_url(repo_name)
        push_info = query_revision_info(repo_url, revision)

        if not description:
            description = 'Task graph generated via Mozilla CI tools'

        METADATA = {
            'description': description,
            'owner': push_info['user'],
            'source': '%s/rev/%s' % (repo_url, revision),
        }

    result = {'name': name}
    result.update(METADATA)
    return result
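# Illustrative sketch, not part of the original module: _query_metadata caches
# everything except 'name' in the module-level METADATA dict, so only the first
# call hits the pushlog; later calls merely swap in a new task name. The repo
# name and revision below are hypothetical placeholders.
def _example_metadata_reuse():
    first = _query_metadata('mozilla-inbound', 'abcdef123456', name='build task')
    second = _query_metadata('mozilla-inbound', 'abcdef123456', name='test task')
    assert first['source'] == second['source']   # shared, cached metadata
    assert first['name'] != second['name']       # per-task name
    return first, second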
def generate_metadata(repo_name, revision, name,
                      description='Task graph generated via Mozilla CI tools'):
    """Generate metadata based on input.

    :param repo_name: e.g. alder, mozilla-central
    :type repo_name: str
    :param revision: 12-chars representing a push
    :type revision: str
    :param name: Human readable name of task-graph; gives people finding this
                 an idea of what this graph is about.
    :type name: str
    :param description: Human readable description of task-graph; explain what it does!
    :type description: str
    """
    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)

    return {
        'name': name,
        'description': description,
        'owner': push_info.user,
        'source': '%s/rev/%s' % (repo_url, revision),
    }
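# Illustrative usage sketch (the repo name, revision and graph name are
# hypothetical): the returned dict carries the four fields the task graph expects.
def _example_generate_metadata():
    metadata = generate_metadata(repo_name='mozilla-central',
                                 revision='abcdef123456',
                                 name='Task graph for abcdef123456')
    # metadata == {'name': ..., 'description': ..., 'owner': ..., 'source': ...}
    return metadata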
def main(): options = parse_args() validate_options(options) repo_url = query_repo_url(options.repo_name) if not valid_credentials(): sys.exit(-1) if options.debug: LOG = setup_logging(logging.DEBUG) else: LOG = setup_logging(logging.INFO) # Setting the QUERY_SOURCE global variable in mozci.py set_query_source(options.query_source) if options.buildernames: options.buildernames = sanitize_buildernames(options.buildernames) repo_url = query_repo_url_from_buildername(options.buildernames[0]) if not options.repo_name: options.repo_name = query_repo_name_from_buildername( options.buildernames[0]) if options.rev == 'tip': revision = query_repo_tip(repo_url) LOG.info("The tip of %s is %s", options.repo_name, revision) else: revision = query_full_revision_info(repo_url, options.rev) # Mode 1: Trigger coalesced jobs if options.coalesced: query_api = BuildApi() request_ids = query_api.find_all_jobs_by_status( options.repo_name, revision, COALESCED) if len(request_ids) == 0: LOG.info('We did not find any coalesced job') for request_id in request_ids: make_retrigger_request(repo_name=options.repo_name, request_id=request_id, dry_run=options.dry_run) return # Mode #2: Fill-in a revision if options.fill_revision: trigger_missing_jobs_for_revision(repo_name=options.repo_name, revision=revision, dry_run=options.dry_run) return # Mode #3: Trigger jobs based on revision list modifiers for buildername in options.buildernames: revlist = determine_revlist(repo_url=repo_url, buildername=buildername, rev=revision, back_revisions=options.back_revisions, delta=options.delta, from_rev=options.from_rev, backfill=options.backfill, skips=options.skips, max_revisions=options.max_revisions) try: trigger_range( buildername=buildername, revisions=revlist, times=options.times, dry_run=options.dry_run, files=options.files, trigger_build_if_missing=options.trigger_build_if_missing) except Exception, e: LOG.exception(e) exit(1) if revlist: LOG.info('https://treeherder.mozilla.org/#/jobs?%s' % urllib.urlencode({ 'repo': options.repo_name, 'fromchange': revlist[-1], 'tochange': revlist[0], 'filter-searchStr': buildername }))
def main():
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    if not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')

        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # Keep only those filtered builders that already have a successful job.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs "
                             "will be triggered, do you wish to continue? "
                             "y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? "
                             "y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception as e:
            LOG.exception(e)
            exit(1)
def main():
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set to true
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision, options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')

        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # Keep only those filtered builders that already have a successful job.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs "
                             "will be triggered, do you wish to continue? "
                             "y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? "
                             "y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception as e:
            LOG.exception(e)
            exit(1)
def _create_task(buildername, repo_name, revision, task_graph_id=None,
                 parent_task_id=None, requires=None, properties={}):
    """Return a TaskCluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list of dependencies to other tasks through the
    requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param task_graph_id: TC graph id to which this task belongs
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster graph
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s." % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    # Needed because of bug 1195751
    all_properties = {
        'product': builder_info['product'],
        'who': push_info['user'],
    }
    all_properties.update(properties)

    # XXX: We should validate that the parent task is a valid parent platform
    #      e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': revision
            },
            'properties': all_properties,
        },
        metadata_name=buildername
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
def test_query_repo_url_invalid(self, query_repository):
    """query_repo_url should raise an Exception when a repository is not in the JSON file."""
    with self.assertRaises(Exception):
        repositories.query_repo_url("not-a-repo")
def _create_task(buildername, repo_name, revision, metadata=None, task_graph_id=None,
                 parent_task_id=None, requires=None, properties={}, *args, **kwargs):
    """Return a TaskCluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list of dependencies to other tasks through the
    requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Metadata for the task. If not specified, generate it.
    :type metadata: json
    :param task_graph_id: TC graph id to which this task belongs
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster graph
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s." % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)
    full_revision = str(push_info.changesets[0].node)

    # Needed because of bug 1195751
    all_properties = {
        'product': builder_info['product'],
        'who': push_info.user,
    }
    all_properties.update(properties)
    all_properties.update(get_builder_extra_properties(buildername))

    metadata = metadata if metadata is not None else \
        generate_metadata(repo_name=repo_name,
                          revision=revision,
                          name=buildername)

    # The task's name is used in the task-graph-inspector to list all tasks
    # and using the buildername makes it easy for a person to recognize each job.
    metadata['name'] = buildername

    # XXX: We should validate that the parent task is a valid parent platform
    #      e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': full_revision
            },
            'properties': all_properties,
        },
        metadata=metadata,
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
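# Illustrative sketch, not from the original module: chaining a build task and a
# test task. The builder names and revision are hypothetical, and we assume
# create_task() returns a graph node shaped like {'taskId': ..., 'task': {...}},
# which is what the requires/parent_task_id handling above implies.
def _example_build_and_test_tasks():
    build_task = _create_task(
        buildername='Linux x86-64 mozilla-inbound build',   # hypothetical builder
        repo_name='mozilla-inbound',
        revision='abcdef123456',                            # hypothetical push
    )
    test_task = _create_task(
        buildername='Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1',
        repo_name='mozilla-inbound',
        revision='abcdef123456',
        parent_task_id=build_task['taskId'],  # let the test reuse the build's artifacts
        requires=[build_task['taskId']],      # and wait for the build to finish
    )
    return [build_task, test_task]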
def test_query_repo_url_valid(self, query_repository):
    """Test query_repo_url with a mock value for query_repository."""
    self.assertEquals(
        repositories.query_repo_url('repo1'),
        "https://hg.mozilla.org/releases/repo1")
def query_repo_url_from_buildername(buildername):
    """Return the full repository URL for a given known buildername."""
    repo_name = query_repo_name_from_buildername(buildername)
    return repositories.query_repo_url(repo_name)
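# Small usage sketch (the buildername is a hypothetical placeholder): both
# repository helpers key off the buildername, one returning the repo name and
# the other the matching repository URL.
def _example_repo_lookup():
    buildername = 'Linux x86-64 mozilla-inbound build'
    repo_name = query_repo_name_from_buildername(buildername)   # e.g. 'mozilla-inbound'
    repo_url = query_repo_url_from_buildername(buildername)     # the matching hg URL
    return repo_name, repo_url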
def main(): parser = ArgumentParser() parser.add_argument("--debug", action="store_true", dest="debug", help="set debug for logging.") parser.add_argument("--dry-run", action="store_true", dest="dry_run", help="Dry run. No real actions are taken.") parser.add_argument("--repo-name", action="store", dest="repo_name", type=str, help="Repository name, e.g. mozilla-inbound.") parser.add_argument("--revision", action="store", dest="revision", type=str, help="12-char representing a push.") parser.add_argument("--trigger-from-task-id", action="store", dest="trigger_from_task_id", type=str, help="Trigger builders based on build task (use with " "--builders).") parser.add_argument("--builders", action="store", dest="builders", type=str, help="Use this if you want to pass a list of builders " "(e.g. \"['builder 1']\".") parser.add_argument("--children-of", action="store", dest="children_of", type=str, help="This allows you to request a list of all the associated " "test jobs to a build job.") parser.add_argument("-g", "--graph", action="store", dest="builders_graph", help='Graph of builders in the form of: ' 'dict(builder: [dep_builders].') options = parser.parse_args() if options.debug: setup_logging(logging.DEBUG) else: setup_logging() assert options.repo_name and options.revision, \ "Make sure you specify --repo-name and --revision" if not options.dry_run and not credentials_available(): sys.exit(1) repo_url = query_repo_url(options.repo_name) revision = query_push_by_revision(repo_url=repo_url, revision=options.revision, return_revision_list=True) builders = None if options.builders: builders = ast.literal_eval(options.builders) else: builders = get_downstream_jobs(options.children_of) if options.trigger_from_task_id and builders: trigger_builders_based_on_task_id( repo_name=options.repo_name, revision=revision, task_id=options.trigger_from_task_id, builders=builders, dry_run=options.dry_run ) elif builders: tc_graph = generate_tc_graph_from_builders( builders=builders, repo_name=options.repo_name, revision=revision ) mgr = TaskClusterManager() mgr.schedule_graph( task_graph=tc_graph, dry_run=options.dry_run ) elif options.builders_graph: mgr = TaskClusterBuildbotManager() mgr.schedule_graph( repo_name=options.repo_name, revision=revision, builders_graph=ast.literal_eval(options.builders_graph), dry_run=options.dry_run ) else: print "Please read the help menu to know what options are available to you."
def main(): options = parse_args() if options.debug: LOG = setup_logging(logging.DEBUG) else: LOG = setup_logging(logging.INFO) if options.action == 'trigger-all-talos': trigger_all_talos_jobs(options.repo_name, options.rev, options.times, dry_run=options.dry_run) sys.exit(0) validate_options(options) if not options.dry_run and not valid_credentials(): sys.exit(-1) # Setting the QUERY_SOURCE global variable in mozci.py set_query_source(options.query_source) if options.buildernames: options.buildernames = sanitize_buildernames(options.buildernames) repo_url = query_repo_url_from_buildername(options.buildernames[0]) if not options.repo_name: repo_name = query_repo_name_from_buildername(options.buildernames[0]) else: repo_name = options.repo_name repo_url = query_repo_url(repo_name) if options.rev == 'tip': revision = query_repo_tip(repo_url).changesets[0].node LOG.info("The tip of %s is %s", repo_name, revision) else: revision = query_push_by_revision(repo_url, options.rev, return_revision_list=True) # Schedule jobs through TaskCluster if --taskcluster option has been set to true if options.taskcluster: mgr = TaskClusterBuildbotManager(web_auth=True) else: mgr = BuildAPIManager() trigger_build_if_missing = options.trigger_build_if_missing if repo_name == 'try': trigger_build_if_missing = False # Mode 0: Backfill if options.backfill: manual_backfill(revision, options.buildernames[0], dry_run=options.dry_run) return # Mode 1: Trigger coalesced jobs if options.coalesced: query_api = BuildApi() request_ids = query_api.find_all_jobs_by_status( repo_name, revision, COALESCED) if len(request_ids) == 0: LOG.info('We did not find any coalesced job') for request_id in request_ids: make_retrigger_request(repo_name=repo_name, request_id=request_id, auth=get_credentials(), dry_run=options.dry_run) return # Mode #2: Fill-in a revision or trigger_test_jobs_only if options.fill_revision or options.trigger_tests_only: mgr.trigger_missing_jobs_for_revision( repo_name=repo_name, revision=revision, dry_run=options.dry_run, trigger_build_if_missing=not options.trigger_tests_only) return # Mode #3: Trigger jobs based on revision list modifiers if not (options.includes or options.exclude or options.failed_jobs or options.trigger_talos_for_build): job_names = options.buildernames # Mode 4 - Schedule every builder matching --includes and does not match --exclude. elif options.includes or options.exclude: _includes_excludes(options) # Mode 5: Use --failed-jobs to trigger jobs for particular revision elif options.failed_jobs: job_names = TreeherderApi().find_all_jobs_by_status( repo_name=repo_name, revision=revision, status=WARNING) elif options.trigger_talos_for_build: trigger_talos_jobs_for_build( buildername=options.buildernames[0], revision=revision, times=2, dry_run=options.dry_run, ) exit(0) for buildername in job_names: revlist = determine_revlist(repo_url=repo_url, buildername=buildername, rev=revision, back_revisions=options.back_revisions, delta=options.delta, from_rev=options.from_rev, backfill=options.backfill, skips=options.skips, max_revisions=options.max_revisions) _print_treeherder_link(revlist=revlist, repo_name=repo_name, buildername=buildername, revision=revision, log=LOG, includes=options.includes, exclude=options.exclude) try: mgr.trigger_range( buildername=buildername, repo_name=repo_name, revisions=revlist, times=options.times, dry_run=options.dry_run, files=options.files, trigger_build_if_missing=trigger_build_if_missing) except Exception as e: LOG.exception(e) exit(1)
def trigger_range(buildername, revisions, times=1, dry_run=False, files=None, extra_properties=None, trigger_build_if_missing=True): """Schedule the job named "buildername" ("times" times) in every revision on 'revisions'.""" repo_name = query_repo_name_from_buildername(buildername) repo_url = repositories.query_repo_url(repo_name) if revisions != []: LOG.info( "We want to have %s job(s) of %s on the following revisions: " % (times, buildername)) for r in revisions: LOG.info(" - %s" % r) for rev in revisions: LOG.info("") LOG.info("=== %s ===" % rev) if VALIDATE and not valid_revision(repo_url, rev): LOG.info( "We can't trigger anything on pushes without a valid revision." ) continue # 1) How many potentially completed jobs can we get for this buildername? matching_jobs = QUERY_SOURCE.get_matching_jobs(repo_name, rev, buildername) status_summary = StatusSummary(matching_jobs) # TODO: change this debug message when we have a less hardcoded _status_summary LOG.debug("We found %d pending/running jobs, %d successful jobs and " "%d failed jobs" % (status_summary.pending_jobs + status_summary.running_jobs, status_summary.successful_jobs, status_summary.failed_jobs)) if status_summary.potential_jobs >= times: LOG.info( "We have %d job(s) for '%s' which is enough for the %d job(s) we want." % (status_summary.potential_jobs, buildername, times)) else: # 2) If we have less potential jobs than 'times' instances then # we need to fill it in. LOG.info("We have found %d potential job(s) matching '%s' on %s. " "We need to trigger more." % (status_summary.potential_jobs, buildername, rev)) schedule_new_job = True # If a job matching what we want already exists, we can # use the retrigger API in self-serve to retrigger that # instead of creating a new arbitrary job if len(matching_jobs) > 0 and files is None: try: request_id = QUERY_SOURCE.get_buildapi_request_id( repo_name, matching_jobs[0]) make_retrigger_request( repo_name=repo_name, request_id=request_id, auth=get_credentials(), count=(times - status_summary.potential_jobs), dry_run=dry_run) schedule_new_job = False except (IndexError, ConnectionError, ReadTimeout, ValueError) as e: # Logging until we can determine why we get these errors # We should have one of these: # {'requests': [{'request_id': int]} # {'request_id': int} LOG.info(matching_jobs[0]) LOG.info(str(e)) LOG.warning("We failed to retrigger the job, however, " "we will try to schedule a new one.") # If no matching job exists, we have to trigger a new arbitrary job if schedule_new_job: list_of_requests = trigger_job( revision=rev, buildername=buildername, times=(times - status_summary.potential_jobs), dry_run=dry_run, files=files, extra_properties=extra_properties, trigger_build_if_missing=trigger_build_if_missing) if list_of_requests and any(req.status_code != 202 for req in list_of_requests): LOG.warning("Not all requests succeeded.")
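# Hedged example (the buildername and revisions are placeholders): trigger_range
# tops up each listed revision until it has `times` jobs for the builder,
# preferring a self-serve retrigger when a matching job already exists.
def _example_trigger_range(dry_run=True):
    trigger_range(
        buildername='Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1',
        revisions=['abcdef123456', '123456abcdef'],
        times=2,
        dry_run=dry_run,  # keep True unless you really want to schedule jobs
    )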
def trigger_job(revision, buildername, times=1, files=None, dry_run=False, extra_properties=None, trigger_build_if_missing=True): """Trigger a job through self-serve. We return a list of all requests made. """ repo_name = query_repo_name_from_buildername(buildername) builder_to_trigger = None list_of_requests = [] repo_url = repositories.query_repo_url(repo_name) if VALIDATE and not valid_revision(repo_url, revision): return list_of_requests LOG.info("==> We want to trigger '%s' a total of %d time(s)." % (buildername, times)) LOG.info("") # Extra line to help visual of logs if VALIDATE and not valid_builder(buildername): LOG.error("The builder %s requested is invalid" % buildername) # XXX How should we exit cleanly? exit(-1) if files: builder_to_trigger = buildername _all_urls_reachable(files) else: builder_to_trigger, package_url, test_url = determine_trigger_objective( revision=revision, buildername=buildername, trigger_build_if_missing=trigger_build_if_missing, will_use_buildapi=True ) if builder_to_trigger != buildername and times != 1: # The user wants to trigger a downstream job, # however, we need a build job instead. # We should trigger the downstream job multiple times, however, # we only trigger the upstream jobs once. LOG.debug("Since we need to trigger a build job we don't need to " "trigger it %s times but only once." % times) if trigger_build_if_missing: LOG.info("In order to trigger %s %i times, " "please run the script again after %s ends." % (buildername, times, builder_to_trigger)) else: LOG.info("We won't trigger '%s' because there is no working build." % buildername) LOG.info("") times = 1 if builder_to_trigger: if dry_run: LOG.info("Dry-run: We were going to request '%s' %s times." % (builder_to_trigger, times)) # Running with dry_run being True will only output information trigger( builder=builder_to_trigger, revision=revision, files=[package_url, test_url], dry_run=dry_run, extra_properties=extra_properties ) else: for _ in range(times): req = trigger( builder=builder_to_trigger, revision=revision, files=[package_url, test_url], dry_run=dry_run, extra_properties=extra_properties ) if req is not None: list_of_requests.append(req) else: LOG.debug("Nothing needs to be triggered") # Cleanup old buildjson files. clean_directory() return list_of_requests
def trigger_job(revision, buildername, times=1, files=None, dry_run=False, extra_properties=None, trigger_build_if_missing=True): """Trigger a job through self-serve. We return a list of all requests made. """ repo_name = query_repo_name_from_buildername(buildername) builder_to_trigger = None list_of_requests = [] repo_url = repositories.query_repo_url(repo_name) if VALIDATE and not pushlog.valid_revision(repo_url, revision): return list_of_requests LOG.info( "===> We want to trigger '%s' on revision '%s' a total of %d time(s)." % (buildername, revision, times)) LOG.info("") # Extra line to help visual of logs if VALIDATE and not valid_builder(buildername): LOG.error("The builder %s requested is invalid" % buildername) # XXX How should we exit cleanly? exit(-1) if files: builder_to_trigger = buildername _all_urls_reachable(files) else: builder_to_trigger, package_url, test_url = determine_trigger_objective( revision=revision, buildername=buildername, trigger_build_if_missing=trigger_build_if_missing) if builder_to_trigger != buildername and times != 1: # The user wants to trigger a downstream job, # however, we need a build job instead. # We should trigger the downstream job multiple times, however, # we only trigger the upstream jobs once. LOG.debug("Since we need to trigger a build job we don't need to " "trigger it %s times but only once." % times) if trigger_build_if_missing: LOG.info("In order to trigger %s %i times, " "please run the script again after %s ends." % (buildername, times, builder_to_trigger)) else: LOG.info( "We won't trigger '%s' because there is no working build." % buildername) LOG.info("") times = 1 if builder_to_trigger: if dry_run: LOG.info("Dry-run: We were going to request '%s' %s times." % (builder_to_trigger, times)) # Running with dry_run being True will only output information trigger(builder=builder_to_trigger, revision=revision, files=[package_url, test_url], dry_run=dry_run, extra_properties=extra_properties) else: for _ in range(times): req = trigger(builder=builder_to_trigger, revision=revision, files=[package_url, test_url], dry_run=dry_run, extra_properties=extra_properties) if req is not None: list_of_requests.append(req) else: LOG.debug("Nothing needs to be triggered") # Cleanup old buildjson files. clean_directory() return list_of_requests
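# Hedged example (the buildername and revision are placeholders): a plain call
# goes through determine_trigger_objective() to find or create the needed build;
# under dry_run the function only logs what it would request and returns the
# list of requests actually made.
def _example_trigger_job(dry_run=True):
    return trigger_job(
        revision='abcdef123456',
        buildername='Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1',
        times=1,
        dry_run=dry_run,
    )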
def trigger_range(buildername, revisions, times=1, dry_run=False, files=None, extra_properties=None, trigger_build_if_missing=True): """Schedule the job named "buildername" ("times" times) in every revision on 'revisions'.""" repo_name = query_repo_name_from_buildername(buildername) repo_url = repositories.query_repo_url(repo_name) if revisions != []: LOG.info("We want to have %s job(s) of %s on the following revisions: " % (times, buildername)) for r in revisions: LOG.info(" - %s" % r) for rev in revisions: LOG.info("") LOG.info("=== %s ===" % rev) if VALIDATE and not valid_revision(repo_url, rev): LOG.info("We can't trigger anything on pushes without a valid revision.") continue LOG.info("We want to have %s job(s) of %s" % (times, buildername)) # 1) How many potentially completed jobs can we get for this buildername? matching_jobs = QUERY_SOURCE.get_matching_jobs(repo_name, rev, buildername) status_summary = StatusSummary(matching_jobs) # TODO: change this debug message when we have a less hardcoded _status_summary LOG.debug("We found %d pending/running jobs, %d successful jobs and " "%d failed jobs" % (status_summary.pending_jobs + status_summary.running_jobs, status_summary.successful_jobs, status_summary.failed_jobs)) if status_summary.potential_jobs >= times: LOG.info("We have %d job(s) for '%s' which is enough for the %d job(s) we want." % (status_summary.potential_jobs, buildername, times)) else: # 2) If we have less potential jobs than 'times' instances then # we need to fill it in. LOG.info("We have found %d potential job(s) matching '%s' on %s. " "We need to trigger more." % (status_summary.potential_jobs, buildername, rev)) # If a job matching what we want already exists, we can # use the retrigger API in self-serve to retrigger that # instead of creating a new arbitrary job if len(matching_jobs) > 0 and files is None: request_id = QUERY_SOURCE.get_buildapi_request_id(repo_name, matching_jobs[0]) make_retrigger_request( repo_name=repo_name, request_id=request_id, auth=get_credentials(), count=(times - status_summary.potential_jobs), dry_run=dry_run) # If no matching job exists, we have to trigger a new arbitrary job else: list_of_requests = trigger_job( revision=rev, buildername=buildername, times=(times - status_summary.potential_jobs), dry_run=dry_run, files=files, extra_properties=extra_properties, trigger_build_if_missing=trigger_build_if_missing) if list_of_requests and any(req.status_code != 202 for req in list_of_requests): LOG.warning("Not all requests succeeded.")
def main(): parser = ArgumentParser() parser.add_argument("--debug", action="store_true", dest="debug", help="set debug for logging.") parser.add_argument("--dry-run", action="store_true", dest="dry_run", help="Dry run. No real actions are taken.") parser.add_argument("--repo-name", action="store", dest="repo_name", type=str, help="Repository name, e.g. mozilla-inbound.") parser.add_argument("--revision", action="store", dest="revision", type=str, help="12-char representing a push.") parser.add_argument("--trigger-from-task-id", action="store", dest="trigger_from_task_id", type=str, help="Trigger builders based on build task (use with " "--builders).") parser.add_argument("--builders", action="store", dest="builders", type=str, help="Use this if you want to pass a list of builders " "(e.g. \"['builder 1']\".") parser.add_argument("--children-of", action="store", dest="children_of", type=str, help="This allows you to request a list of all the associated " "test jobs to a build job.") parser.add_argument("-g", "--graph", action="store", dest="builders_graph", help='Graph of builders in the form of: ' 'dict(builder: [dep_builders].') options = parser.parse_args() if options.debug: LOG = setup_logging(logging.DEBUG) else: LOG = setup_logging() assert options.repo_name and options.revision, \ "Make sure you specify --repo-name and --revision" if not options.dry_run and not credentials_available(): LOG.error("Either choose --dry-run or provide credentials") sys.exit(1) repo_url = query_repo_url(options.repo_name) revision = query_push_by_revision(repo_url=repo_url, revision=options.revision, return_revision_list=True) builders = None if options.builders: builders = ast.literal_eval(options.builders) else: builders = get_downstream_jobs(options.children_of) if options.trigger_from_task_id and builders: trigger_builders_based_on_task_id( repo_name=options.repo_name, revision=revision, task_id=options.trigger_from_task_id, builders=builders, dry_run=options.dry_run ) elif builders: tc_graph = generate_tc_graph_from_builders( builders=builders, repo_name=options.repo_name, revision=revision ) mgr = TaskClusterManager() mgr.schedule_graph( task_graph=tc_graph, dry_run=options.dry_run ) elif options.builders_graph: mgr = TaskClusterBuildbotManager() mgr.schedule_graph( repo_name=options.repo_name, revision=revision, builders_graph=ast.literal_eval(options.builders_graph), dry_run=options.dry_run ) else: print "Please read the help menu to know what options are available to you."
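# Hypothetical invocations of the argument parser above (the script name and
# all values are placeholders, not taken from the original project):
#
#   python trigger_graph.py --repo-name mozilla-inbound --revision abcdef123456 \
#       --children-of "Linux x86-64 mozilla-inbound build" --dry-run
#
#   python trigger_graph.py --repo-name mozilla-inbound --revision abcdef123456 \
#       -g "{'Linux x86-64 mozilla-inbound build': ['Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1']}" \
#       --dry-run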
def trigger_range(buildername, revisions, times=1, dry_run=False, files=None, extra_properties=None, trigger_build_if_missing=True): """Schedule the job named "buildername" ("times" times) in every revision on 'revisions'.""" repo_name = query_repo_name_from_buildername(buildername) repo_url = repositories.query_repo_url(repo_name) if revisions != []: LOG.info("We want to have %s job(s) of %s on revisions %s" % (times, buildername, str(revisions))) for rev in revisions: LOG.info("") LOG.info("=== %s ===" % rev) if VALIDATE and not pushlog.valid_revision(repo_url, rev): LOG.info( "We can't trigger anything on pushes without a valid revision." ) continue LOG.info("We want to have %s job(s) of %s on revision %s" % (times, buildername, rev)) # 1) How many potentially completed jobs can we get for this buildername? matching_jobs = QUERY_SOURCE.get_matching_jobs(repo_name, rev, buildername) successful_jobs, pending_jobs, running_jobs, _, failed_jobs = \ _status_summary(matching_jobs) potential_jobs = pending_jobs + running_jobs + successful_jobs + failed_jobs # TODO: change this debug message when we have a less hardcoded _status_summary LOG.debug("We found %d pending/running jobs, %d successful jobs and " "%d failed jobs" % (pending_jobs + running_jobs, successful_jobs, failed_jobs)) if potential_jobs >= times: LOG.info( "We have %d job(s) for '%s' which is enough for the %d job(s) we want." % (potential_jobs, buildername, times)) else: # 2) If we have less potential jobs than 'times' instances then # we need to fill it in. LOG.info("We have found %d potential job(s) matching '%s' on %s. " "We need to trigger more." % (potential_jobs, buildername, rev)) # If a job matching what we want already exists, we can # use the retrigger API in self-serve to retrigger that # instead of creating a new arbitrary job if len(matching_jobs) > 0 and files is None: request_id = QUERY_SOURCE.get_buildapi_request_id( repo_name, matching_jobs[0]) make_retrigger_request(repo_name=repo_name, request_id=request_id, auth=get_credentials(), count=(times - potential_jobs), dry_run=dry_run) # If no matching job exists, we have to trigger a new arbitrary job else: list_of_requests = trigger_job( revision=rev, buildername=buildername, times=(times - potential_jobs), dry_run=dry_run, files=files, extra_properties=extra_properties, trigger_build_if_missing=trigger_build_if_missing) if list_of_requests and any(req.status_code != 202 for req in list_of_requests): LOG.warning("Not all requests succeeded.")