def test_query_push_by_revision(self, get):
    """The push object returned for a known revision has the expected fields."""
    result = query_push_by_revision(repo_url=self.repo_url,
                                    revision=self.revision)
    assert result is not None
    assert result.id == '87833'
    assert len(result.changesets) == 1
    first_changeset = result.changesets[0]
    assert first_changeset.node == "71e69424094d2f86c51ba544fd861d65a578a0f2"
def generate_metadata(repo_name, revision, name,
                      description='Task graph generated via Mozilla CI tools'):
    """Build the metadata dict for a task-graph submission.

    :param repo_name: e.g. alder, mozilla-central
    :type repo_name: str
    :param revision: 12-chars representing a push
    :type revision: str
    :param name: Human readable name of task-graph, give people finding this
                 an idea what this graph is about.
    :type name: str
    :param description: Human readable description of task-graph, explain
                        what it does!
    :type description: str
    """
    LOG.debug("Determining metadata.")
    repository_url = query_repo_url(repo_name)
    push = query_push_by_revision(repo_url=repository_url, revision=revision)
    metadata = {
        'name': name,
        'description': description,
        'owner': push.user,
        'source': '%s/rev/%s' % (repository_url, revision),
    }
    return metadata
def generate_metadata(repo_name, revision, name,
                      description='Task graph generated via Mozilla CI tools'):
    """Return the metadata dict used when scheduling a task-graph.

    :param repo_name: e.g. alder, mozilla-central
    :type repo_name: str
    :param revision: 12-chars representing a push
    :type revision: str
    :param name: Human readable name of task-graph, give people finding this
                 an idea what this graph is about.
    :type name: str
    :param description: Human readable description of task-graph, explain
                        what it does!
    :type description: str
    """
    url_of_repo = query_repo_url(repo_name)
    push = query_push_by_revision(repo_url=url_of_repo, revision=revision)
    source_link = '%s/rev/%s' % (url_of_repo, revision)
    return {
        'name': name,
        'description': description,
        'owner': push.user,
        'source': source_link,
    }
def _create_task(buildername, repo_name, revision, metadata=None,
                 task_graph_id=None, parent_task_id=None, requires=None,
                 properties=None, *args, **kwargs):
    """Return TaskCluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Metadata for the task. If not specified, generate it.
    :type metadata: json
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a
                           dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :param properties: Extra buildbot properties to merge into the payload.
    :type properties: dict

    :returns: TaskCluster graph
    :rtype: dict
    """
    # Fix: the default used to be a shared mutable ``{}``; use ``None`` so a
    # default can never leak state across calls.  Passing ``{}`` still works.
    if properties is None:
        properties = {}

    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s."
            % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)
    # Use the full 40-char changeset hash. Needed because of bug 1195751
    full_revision = str(push_info.changesets[0].node)

    all_properties = {
        'product': builder_info['product'],
        'who': push_info.user,
    }
    all_properties.update(properties)
    all_properties.update(get_builder_extra_properties(buildername))

    metadata = metadata if metadata is not None else \
        generate_metadata(repo_name=repo_name,
                          revision=revision,
                          name=buildername)

    # The task's name is used in the task-graph-inspector to list all tasks
    # and using the buildername makes it easy for a person to recognize each job.
    metadata['name'] = buildername

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': full_revision
            },
            'properties': all_properties,
        },
        metadata=metadata,
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
def main():
    """CLI entry point: schedule a TaskCluster graph for a set of builders.

    Builders come either from --builders (a Python-literal list) or from
    --children-of (every test job downstream of a given build job).
    """
    parser = ArgumentParser()
    parser.add_argument("--debug",
                        action="store_true",
                        dest="debug",
                        help="set debug for logging.")
    parser.add_argument("--dry-run",
                        action="store_true",
                        dest="dry_run",
                        help="Dry run. No real actions are taken.")
    parser.add_argument("--repo-name",
                        action="store",
                        dest="repo_name",
                        type=str,
                        help="Repository name, e.g. mozilla-inbound.")
    parser.add_argument("--revision",
                        action="store",
                        dest="revision",
                        type=str,
                        help="12-char representing a push.")
    parser.add_argument("--trigger-from-task-id",
                        action="store",
                        dest="trigger_from_task_id",
                        type=str,
                        help="Trigger builders based on build task (use with "
                             "--builders).")
    parser.add_argument("--builders",
                        action="store",
                        dest="builders",
                        type=str,
                        help="Use this if you want to pass a list of builders "
                             "(e.g. \"['builder 1']\".")
    parser.add_argument("--children-of",
                        action="store",
                        dest="children_of",
                        type=str,
                        help="This allows you to request a list of all the associated "
                             "test jobs to a build job.")
    parser.add_argument("-g", "--graph",
                        action="store",
                        dest="builders_graph",
                        help='Graph of builders in the form of: '
                             'dict(builder: [dep_builders].')
    options = parser.parse_args()

    if options.debug:
        setup_logging(logging.DEBUG)
    else:
        setup_logging()

    assert options.repo_name and options.revision, \
        "Make sure you specify --repo-name and --revision"

    # Credentials are only needed when we will actually talk to the scheduler.
    if not options.dry_run and not credentials_available():
        sys.exit(1)

    repo_url = query_repo_url(options.repo_name)
    # return_revision_list=True appears to yield the full changeset hash
    # rather than a push object (see the related tests) — TODO confirm.
    revision = query_push_by_revision(repo_url=repo_url,
                                      revision=options.revision,
                                      return_revision_list=True)

    builders = None
    if options.builders:
        # --builders is parsed as a Python literal, e.g. "['builder 1']".
        builders = ast.literal_eval(options.builders)
    else:
        builders = get_downstream_jobs(options.children_of)

    if options.trigger_from_task_id and builders:
        # Trigger the builders against an existing build task's artifacts.
        trigger_builders_based_on_task_id(
            repo_name=options.repo_name,
            revision=revision,
            task_id=options.trigger_from_task_id,
            builders=builders,
            dry_run=options.dry_run
        )
    elif builders:
        tc_graph = generate_tc_graph_from_builders(
            builders=builders,
            repo_name=options.repo_name,
            revision=revision
        )
        mgr = TaskClusterManager()
        mgr.schedule_graph(
            task_graph=tc_graph,
            dry_run=options.dry_run
        )
    elif options.builders_graph:
        # -g/--graph: a dict literal mapping builder -> [dependent builders].
        mgr = TaskClusterBuildbotManager()
        mgr.schedule_graph(
            repo_name=options.repo_name,
            revision=revision,
            builders_graph=ast.literal_eval(options.builders_graph),
            dry_run=options.dry_run
        )
    else:
        print "Please read the help menu to know what options are available to you."
def main():
    """CLI entry point: trigger buildbot jobs for a push, in one of several modes.

    Modes (checked in order): coalesced retriggers, fill-in a revision /
    tests-only, explicit buildernames, --includes/--exclude filters, and
    --failed-jobs.  The selected job names are then triggered over a
    revision list via the chosen manager.
    """
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    # NOTE(review): unlike other variants of this entry point, credentials
    # are required here even for a dry run — confirm this is intended.
    if not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set
    # to true; otherwise fall back to BuildAPI (self-serve).
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, missing builds are never triggered implicitly.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name,
                                                        revision,
                                                        COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match
    # --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')
        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # We will filter out all the existing job from those successful
            # job we have.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs will be triggered,\
 do you wish to continue? y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? \
y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception, e:
            LOG.exception(e)
            exit(1)
def test_query_push_by_revision_return_changesets(self, get):
    """With return_revision_list=True the full changeset node is returned."""
    node = query_push_by_revision(repo_url=self.repo_url,
                                  revision=self.revision,
                                  return_revision_list=True)
    assert node == "71e69424094d2f86c51ba544fd861d65a578a0f2"
def main():
    """CLI entry point: trigger buildbot jobs for a push, in one of several modes.

    Modes (checked in order): backfill, coalesced retriggers, fill-in a
    revision / tests-only, explicit buildernames, --includes/--exclude
    filters, and --failed-jobs.  The selected job names are then triggered
    over a revision list via the chosen manager.
    """
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    validate_options(options)
    # Credentials are only needed when we will actually schedule something.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set
    # to true; otherwise fall back to BuildAPI (self-serve).
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager()
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, missing builds are never triggered implicitly.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision,
                        options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name,
                                                        revision,
                                                        COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match
    # --exclude.
    elif options.includes or options.exclude:
        filters_in = options.includes.split(',') + [repo_name]
        filters_out = []
        if options.exclude:
            filters_out = options.exclude.split(',')
        job_names = filter_buildernames(
            buildernames=query_builders(repo_name=repo_name),
            include=filters_in,
            exclude=filters_out
        )
        if len(job_names) == 0:
            LOG.info("0 jobs match these filters. please try again.")
            return

        if options.existing_only:
            # We query all successful jobs for a given revision and filter
            # them by include/exclude filters.
            trigger_build_if_missing = False
            successful_jobs = TreeherderApi().find_all_jobs_by_status(
                repo_name=repo_name,
                revision=revision,
                status=SUCCESS)
            # We will filter out all the existing job from those successful
            # job we have.
            job_names = [buildername for buildername in successful_jobs
                         if buildername in job_names]
            cont = raw_input("The ones which have existing builds out of %i jobs will be triggered,\
 do you wish to continue? y/n/d (d=show details) " % len(job_names))
        else:
            cont = raw_input("%i jobs will be triggered, do you wish to continue? \
y/n/d (d=show details) " % len(job_names))

        if cont.lower() == 'd':
            LOG.info("The following jobs will be triggered: \n %s" % '\n'.join(job_names))
            cont = raw_input("Do you wish to continue? y/n ")

        if cont.lower() != 'y':
            exit(1)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception, e:
            LOG.exception(e)
            exit(1)
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    We return a list of all requests made.

    :param revision: push revision; short revisions are expanded to the full
        40-char changeset hash before triggering.
    :param buildername: buildbot builder to trigger.
    :param times: how many times to trigger the job.
    :param files: optional list of file URLs to hand to the builder.
    :param dry_run: when True, only log what would be requested.
    :param extra_properties: optional dict of extra buildbot properties.
    :param trigger_build_if_missing: trigger the upstream build when no
        working build exists for the downstream job requested.
    :returns: list of requests made (empty on validation failure).
    """
    # Fix: default used to be a shared mutable ``{}``; ``None`` avoids any
    # cross-call leakage through the default argument.  The falsy check below
    # also rebinds an empty dict so the caller's object is never mutated when
    # nothing was passed.
    if not extra_properties:
        extra_properties = {}
    extra_properties.update(get_builder_extra_properties(buildername))

    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = repositories.query_repo_url(repo_name)

    if len(revision) != 40:
        LOG.info('We are going to convert the revision into 40 chars ({}).'.format(revision))
        push_info = query_push_by_revision(repo_url, revision)
        revision = push_info.changesets[0].node
        assert len(revision) == 40, 'This should have been a 40 char revision.'

    if VALIDATE and not valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info("==> We want to trigger '%s' a total of %d time(s)." %
             (buildername, times))

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        builder_to_trigger = buildername
        _all_urls_reachable(files)
        # Fix: previously this branch fell through to trigger() with
        # ``package_url``/``test_url`` unbound (NameError); use the
        # caller-supplied files instead.
        trigger_files = files
    else:
        builder_to_trigger, package_url, test_url = determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing,
            will_use_buildapi=True
        )
        trigger_files = [package_url, test_url]

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info("We won't trigger '%s' because there is no working build."
                         % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times."
                     % (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(
                builder=builder_to_trigger,
                revision=revision,
                files=trigger_files,
                dry_run=dry_run,
                extra_properties=extra_properties
            )
        else:
            for _ in range(times):
                req = trigger(
                    builder=builder_to_trigger,
                    revision=revision,
                    files=trigger_files,
                    dry_run=dry_run,
                    extra_properties=extra_properties
                )
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def main():
    """CLI entry point: schedule a TaskCluster graph for a set of builders.

    Builders come either from --builders (a Python-literal list) or from
    --children-of (every test job downstream of a given build job).
    """
    parser = ArgumentParser()
    parser.add_argument("--debug",
                        action="store_true",
                        dest="debug",
                        help="set debug for logging.")
    parser.add_argument("--dry-run",
                        action="store_true",
                        dest="dry_run",
                        help="Dry run. No real actions are taken.")
    parser.add_argument("--repo-name",
                        action="store",
                        dest="repo_name",
                        type=str,
                        help="Repository name, e.g. mozilla-inbound.")
    parser.add_argument("--revision",
                        action="store",
                        dest="revision",
                        type=str,
                        help="12-char representing a push.")
    parser.add_argument("--trigger-from-task-id",
                        action="store",
                        dest="trigger_from_task_id",
                        type=str,
                        help="Trigger builders based on build task (use with "
                             "--builders).")
    parser.add_argument("--builders",
                        action="store",
                        dest="builders",
                        type=str,
                        help="Use this if you want to pass a list of builders "
                             "(e.g. \"['builder 1']\".")
    parser.add_argument("--children-of",
                        action="store",
                        dest="children_of",
                        type=str,
                        help="This allows you to request a list of all the associated "
                             "test jobs to a build job.")
    parser.add_argument("-g", "--graph",
                        action="store",
                        dest="builders_graph",
                        help='Graph of builders in the form of: '
                             'dict(builder: [dep_builders].')
    options = parser.parse_args()

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging()

    assert options.repo_name and options.revision, \
        "Make sure you specify --repo-name and --revision"

    # Credentials are only needed when we will actually talk to the scheduler.
    if not options.dry_run and not credentials_available():
        LOG.error("Either choose --dry-run or provide credentials")
        sys.exit(1)

    repo_url = query_repo_url(options.repo_name)
    # return_revision_list=True appears to yield the full changeset hash
    # rather than a push object (see the related tests) — TODO confirm.
    revision = query_push_by_revision(repo_url=repo_url,
                                      revision=options.revision,
                                      return_revision_list=True)

    builders = None
    if options.builders:
        # --builders is parsed as a Python literal, e.g. "['builder 1']".
        builders = ast.literal_eval(options.builders)
    else:
        builders = get_downstream_jobs(options.children_of)

    if options.trigger_from_task_id and builders:
        # Trigger the builders against an existing build task's artifacts.
        trigger_builders_based_on_task_id(
            repo_name=options.repo_name,
            revision=revision,
            task_id=options.trigger_from_task_id,
            builders=builders,
            dry_run=options.dry_run
        )
    elif builders:
        tc_graph = generate_tc_graph_from_builders(
            builders=builders,
            repo_name=options.repo_name,
            revision=revision
        )
        mgr = TaskClusterManager()
        mgr.schedule_graph(
            task_graph=tc_graph,
            dry_run=options.dry_run
        )
    elif options.builders_graph:
        # -g/--graph: a dict literal mapping builder -> [dependent builders].
        mgr = TaskClusterBuildbotManager()
        mgr.schedule_graph(
            repo_name=options.repo_name,
            revision=revision,
            builders_graph=ast.literal_eval(options.builders_graph),
            dry_run=options.dry_run
        )
    else:
        print "Please read the help menu to know what options are available to you."
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    We return a list of all requests made.

    :param revision: push revision; short revisions are expanded to the full
        40-char changeset hash before triggering (with a warning).
    :param buildername: buildbot builder to trigger.
    :param times: how many times to trigger the job.
    :param files: optional list of file URLs to hand to the builder.
    :param dry_run: when True, only log what would be requested.
    :param extra_properties: optional dict of extra buildbot properties.
    :param trigger_build_if_missing: trigger the upstream build when no
        working build exists for the downstream job requested.
    :returns: list of requests made (empty on validation failure).
    """
    # Fix: default used to be a shared mutable ``{}``; ``None`` avoids any
    # cross-call leakage through the default argument.  The falsy check below
    # also rebinds an empty dict so the caller's object is never mutated when
    # nothing was passed.
    if not extra_properties:
        extra_properties = {}
    extra_properties.update(get_builder_extra_properties(buildername))

    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = repositories.query_repo_url(repo_name)

    if len(revision) != 40:
        LOG.warning(
            'We should not be using revisions less than 40 chars ({}).'.format(
                revision))
        push_info = query_push_by_revision(repo_url, revision)
        revision = push_info.changesets[0].node
        assert len(revision) == 40, 'This should have been a 40 char revision.'

    if VALIDATE and not valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info("==> We want to trigger '%s' a total of %d time(s)." %
             (buildername, times))
    LOG.info("")  # Extra line to help visual of logs

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        builder_to_trigger = buildername
        _all_urls_reachable(files)
        # Fix: previously this branch fell through to trigger() with
        # ``package_url``/``test_url`` unbound (NameError); use the
        # caller-supplied files instead.
        trigger_files = files
    else:
        builder_to_trigger, package_url, test_url = determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing,
            will_use_buildapi=True)
        trigger_files = [package_url, test_url]

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info(
                    "We won't trigger '%s' because there is no working build."
                    % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times."
                     % (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(builder=builder_to_trigger,
                    revision=revision,
                    files=trigger_files,
                    dry_run=dry_run,
                    extra_properties=extra_properties)
        else:
            for _ in range(times):
                req = trigger(builder=builder_to_trigger,
                              revision=revision,
                              files=trigger_files,
                              dry_run=dry_run,
                              extra_properties=extra_properties)
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def main():
    """CLI entry point: trigger buildbot jobs for a push, in one of several modes.

    Modes (checked in order): trigger-all-talos action, backfill, coalesced
    retriggers, fill-in a revision / tests-only, explicit buildernames,
    --includes/--exclude filters, --failed-jobs, and talos-for-build.
    """
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    if options.action == 'trigger-all-talos':
        trigger_all_talos_jobs(options.repo_name, options.rev, options.times,
                               dry_run=options.dry_run)
        sys.exit(0)

    validate_options(options)
    # Credentials are only needed when we will actually schedule something.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set
    # to true; otherwise fall back to BuildAPI (self-serve).
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager(web_auth=True)
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, missing builds are never triggered implicitly.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision,
                        options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(
            repo_name, revision, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only)
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs or
            options.trigger_talos_for_build):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match
    # --exclude.
    elif options.includes or options.exclude:
        # NOTE(review): unlike the other branches, _includes_excludes() does
        # not visibly assign job_names, which the loop below requires —
        # confirm it exits on its own or sets state elsewhere.
        _includes_excludes(options)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    elif options.trigger_talos_for_build:
        trigger_talos_jobs_for_build(
            buildername=options.buildernames[0],
            revision=revision,
            times=2,
            dry_run=options.dry_run,
        )
        exit(0)

    for buildername in job_names:
        revlist = determine_revlist(repo_url=repo_url,
                                    buildername=buildername,
                                    rev=revision,
                                    back_revisions=options.back_revisions,
                                    delta=options.delta,
                                    from_rev=options.from_rev,
                                    backfill=options.backfill,
                                    skips=options.skips,
                                    max_revisions=options.max_revisions)

        _print_treeherder_link(revlist=revlist,
                               repo_name=repo_name,
                               buildername=buildername,
                               revision=revision,
                               log=LOG,
                               includes=options.includes,
                               exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing)
        except Exception as e:
            LOG.exception(e)
            exit(1)
def main():
    """CLI entry point: trigger buildbot jobs for a push, in one of several modes.

    Modes (checked in order): trigger-all-talos action, backfill, coalesced
    retriggers, fill-in a revision / tests-only, explicit buildernames,
    --includes/--exclude filters, --failed-jobs, and talos-for-build.
    """
    options = parse_args()
    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    if options.action == 'trigger-all-talos':
        trigger_all_talos_jobs(options.repo_name, options.rev, options.times,
                               dry_run=options.dry_run)
        sys.exit(0)

    validate_options(options)
    # Credentials are only needed when we will actually schedule something.
    if not options.dry_run and not valid_credentials():
        sys.exit(-1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        repo_name = query_repo_name_from_buildername(options.buildernames[0])
    else:
        repo_name = options.repo_name
        repo_url = query_repo_url(repo_name)

    if options.rev == 'tip':
        revision = query_repo_tip(repo_url).changesets[0].node
        LOG.info("The tip of %s is %s", repo_name, revision)
    else:
        revision = query_push_by_revision(repo_url, options.rev,
                                          return_revision_list=True)

    # Schedule jobs through TaskCluster if --taskcluster option has been set
    # to true; otherwise fall back to BuildAPI (self-serve).
    if options.taskcluster:
        mgr = TaskClusterBuildbotManager(web_auth=True)
    else:
        mgr = BuildAPIManager()

    trigger_build_if_missing = options.trigger_build_if_missing
    # On try, missing builds are never triggered implicitly.
    if repo_name == 'try':
        trigger_build_if_missing = False

    # Mode 0: Backfill
    if options.backfill:
        manual_backfill(revision,
                        options.buildernames[0],
                        dry_run=options.dry_run)
        return

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(repo_name,
                                                        revision,
                                                        COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=repo_name,
                                   request_id=request_id,
                                   auth=get_credentials(),
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision or trigger_test_jobs_only
    if options.fill_revision or options.trigger_tests_only:
        mgr.trigger_missing_jobs_for_revision(
            repo_name=repo_name,
            revision=revision,
            dry_run=options.dry_run,
            trigger_build_if_missing=not options.trigger_tests_only
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    if not (options.includes or options.exclude or options.failed_jobs or
            options.trigger_talos_for_build):
        job_names = options.buildernames

    # Mode 4 - Schedule every builder matching --includes and does not match
    # --exclude.
    elif options.includes or options.exclude:
        # NOTE(review): unlike the other branches, _includes_excludes() does
        # not visibly assign job_names, which the loop below requires —
        # confirm it exits on its own or sets state elsewhere.
        _includes_excludes(options)

    # Mode 5: Use --failed-jobs to trigger jobs for particular revision
    elif options.failed_jobs:
        job_names = TreeherderApi().find_all_jobs_by_status(
            repo_name=repo_name,
            revision=revision,
            status=WARNING)

    elif options.trigger_talos_for_build:
        trigger_talos_jobs_for_build(
            buildername=options.buildernames[0],
            revision=revision,
            times=2,
            dry_run=options.dry_run,
        )
        exit(0)

    for buildername in job_names:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=revision,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        _print_treeherder_link(
            revlist=revlist,
            repo_name=repo_name,
            buildername=buildername,
            revision=revision,
            log=LOG,
            includes=options.includes,
            exclude=options.exclude)

        try:
            mgr.trigger_range(
                buildername=buildername,
                repo_name=repo_name,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=trigger_build_if_missing
            )
        except Exception as e:
            LOG.exception(e)
            exit(1)
def _create_task(buildername, repo_name, revision, metadata=None,
                 task_graph_id=None, parent_task_id=None, requires=None,
                 properties=None, *args, **kwargs):
    """Return TaskCluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the requires
    field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Metadata for the task. If not specified, generate it.
    :type metadata: json
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a
        dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :param properties: Extra buildbot properties to merge into the payload.
    :type properties: dict

    :returns: TaskCluster graph
    :rtype: dict
    """
    # Fix: the default used to be a shared mutable ``{}``; use ``None`` so a
    # default can never leak state across calls.  Passing ``{}`` still works.
    if properties is None:
        properties = {}

    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError("The builder '%s' should be for repo: %s."
                         % (buildername, repo_name))

    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)
    # Use the full 40-char changeset hash. Needed because of bug 1195751
    full_revision = str(push_info.changesets[0].node)

    all_properties = {
        'product': builder_info['product'],
        'who': push_info.user,
    }
    all_properties.update(properties)
    all_properties.update(get_builder_extra_properties(buildername))

    metadata = metadata if metadata is not None else \
        generate_metadata(repo_name=repo_name,
                          revision=revision,
                          name=buildername)

    # The task's name is used in the task-graph-inspector to list all tasks
    # and using the buildername makes it easy for a person to recognize each job.
    metadata['name'] = buildername

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': full_revision
            },
            'properties': all_properties,
        },
        metadata=metadata,
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties'][
            'parent_task_id'] = parent_task_id

    return task