def query_revisions_range(repo_name, from_revision, to_revision):
    """Return a list of revisions for that range.

    Thin wrapper: resolves the repository URL for *repo_name* and
    delegates the range query to the pushlog module.
    """
    repo_url = buildapi.query_repo_url(repo_name)
    return pushlog.query_revisions_range(repo_url, from_revision, to_revision)
def main():
    """Push a range of revisions to try, one try push per revision.

    Requires either --start-rev and --end-rev (a pushlog range) or
    --changeset (a single push); applies the user's try syntax via an
    mq patch before each push.
    """
    options = parse_args()
    repo_url = query_repo_url(options.repo_name)

    if options.start_rev and options.end_rev:
        url = "%s?fromchange=%s&tochange=%s&tipsonly=1" % (
            JSON_PUSHES % {"repo_url": repo_url},
            options.start_rev,
            options.end_rev)
    elif options.changeset:
        url = "%s?changeset=%s" % (
            JSON_PUSHES % {"repo_url": repo_url},
            options.changeset)
    else:
        # FIX: the original message said "-start-rev" (single dash), which
        # does not match the actual --start-rev option name.
        raise Exception("You need to enter either --start-rev and --end-rev "
                        "or --changeset")

    response = requests.get(url).json()

    # The pushlog response maps push ids to push metadata; gather every
    # changeset from every push in the range.
    total_revisions = []
    for push, value in response.iteritems():
        for changeset in value["changesets"]:
            total_revisions.append(changeset)

    LOG.info("We are going to push the following revisions on try:\n %s"
             % total_revisions)

    for revision in total_revisions:
        LOG.info("Updating and pushing the revision %s to try" % revision)
        commands = [
            ['hg', 'qpop', '-a'],
            ['hg', 'update', '%s' % revision],
            ['hg', 'qnew', 'trypatch'],
            ['hg', 'qpush', 'trypatch'],
            ['hg', 'qref', '-m',
             'hg update %s; %s' % (revision, options.try_syntax)],
            ['hg', 'push', '-f', 'try']
        ]
        # FIX: route the debug output through the logger instead of a bare
        # Python 2 `print` statement left over from debugging.
        LOG.debug(commands)
        for command in commands:
            subprocess.call(command, cwd=options.cwd)
        LOG.info("Pushed revision %s to try" % revision)
def main():
    """Trigger a set of jobs, chosen by include/exclude filters, on one revision."""
    options = parse_args()
    if not valid_credentials():
        sys.exit(-1)

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # 'tip' is resolved to a concrete revision before doing anything else.
    if options.rev == 'tip':
        repo_url = query_repo_url(options.repo)
        options.rev = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo, options.rev)

    include_filters = options.includes.split(',') + [options.repo]
    exclude_filters = options.exclude.split(',') if options.exclude else []

    buildernames = filter_buildernames(
        buildernames=query_builders(repo_name=options.repo),
        include=include_filters,
        exclude=exclude_filters)

    if not buildernames:
        LOG.info("0 jobs match these filters, please try again.")
        return

    # Confirm with the user; 'd' first shows the full builder list.
    prompt = ("%i jobs will be triggered, do you wish to continue? "
              "y/n/d (d=show details) " % len(buildernames))
    cont = raw_input(prompt)
    if cont.lower() == 'd':
        LOG.info("The following jobs will be triggered: \n %s"
                 % '\n'.join(buildernames))
        cont = raw_input("Do you wish to continue? y/n ")
    if cont.lower() != 'y':
        exit(1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    for buildername in buildernames:
        trigger_range(
            buildername=buildername,
            revisions=[options.rev],
            times=options.times,
            dry_run=options.dry_run,
        )
        LOG.info('https://treeherder.mozilla.org/#/jobs?%s'
                 % urllib.urlencode({
                     'repo': query_repo_name_from_buildername(buildername),
                     'fromchange': options.rev,
                     'tochange': options.rev,
                     'filter-searchStr': buildername}))
def _query_metadata(repo_name, revision):
    """Build the task-graph metadata dict (owner/source/description) for a push."""
    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)
    metadata = {
        'owner': push_info['user'],
        'source': '%s/rev/%s' % (repo_url, revision),
        'description': 'Task graph generated via Mozilla CI tools',
    }
    return metadata
def main():
    """Trigger every builder matching the user's filters on a single revision."""
    options = parse_args()
    if not valid_credentials():
        sys.exit(-1)

    LOG = setup_logging(logging.DEBUG if options.debug else logging.INFO)

    # Resolve the symbolic 'tip' revision to a concrete changeset first.
    if options.rev == 'tip':
        repo_url = query_repo_url(options.repo)
        options.rev = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo, options.rev)

    filters_in = options.includes.split(',') + [options.repo]
    filters_out = options.exclude.split(',') if options.exclude else []

    buildernames = filter_buildernames(
        buildernames=query_builders(repo_name=options.repo),
        include=filters_in,
        exclude=filters_out
    )

    if len(buildernames) == 0:
        LOG.info("0 jobs match these filters, please try again.")
        return

    # Ask for confirmation; 'd' prints the builder list and asks again.
    cont = raw_input("%i jobs will be triggered, do you wish to continue? "
                     "y/n/d (d=show details) " % len(buildernames))
    if cont.lower() == 'd':
        LOG.info("The following jobs will be triggered: \n %s"
                 % '\n'.join(buildernames))
        cont = raw_input("Do you wish to continue? y/n ")
    if cont.lower() != 'y':
        exit(1)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    for buildername in buildernames:
        trigger_range(
            buildername=buildername,
            revisions=[options.rev],
            times=options.times,
            dry_run=options.dry_run,
        )
        treeherder_query = urllib.urlencode({
            'repo': query_repo_name_from_buildername(buildername),
            'fromchange': options.rev,
            'tochange': options.rev,
            'filter-searchStr': buildername})
        LOG.info('https://treeherder.mozilla.org/#/jobs?%s' % treeherder_query)
def get_revision_range(repo_name, revision):
    """Query pushlog in mozci and return revisions in a range of six.

    Returns a (first, last) tuple of revisions around *revision*.
    """
    # Mobile jobs build from mozilla-central's pushlog.
    if repo_name == 'mobile':
        repo_name = 'mozilla-central'
    try:
        repo_url = query_repo_url(repo_name)
        revlist = query_revisions_range_from_revision_and_delta(
            repo_url,
            revision,
            delta=6)
    except Exception:
        # FIX: was a bare `except:`, which also intercepts SystemExit and
        # KeyboardInterrupt; narrowed to Exception. The error is still
        # re-raised after logging. print(...) with a single parenthesized
        # argument is valid on both Python 2 and 3.
        print("exception while getting repo: %s, revision: %s"
              % (repo_name, revision))
        raise
    return revlist[0], revlist[-1]
def get_revision_range(repo_name, revision):
    """Query pushlog in mozci and return revisions in a range of six.

    Returns a (newest, oldest) tuple taken from both sides of *revision*.
    """
    # Mobile jobs build from mozilla-central's pushlog.
    if repo_name == 'mobile':
        repo_name = 'mozilla-central'
    try:
        repo_url = query_repo_url(repo_name)
        revlist = query_revisions_range_from_revision_before_and_after(
            repo_url, revision, 6, 6)
    except Exception:
        # FIX: was a bare `except:`, which also intercepts SystemExit and
        # KeyboardInterrupt; narrowed to Exception. The error is still
        # re-raised after logging. print(...) with a single parenthesized
        # argument is valid on both Python 2 and 3.
        print("exception while getting repo: %s, revision: %s"
              % (repo_name, revision))
        raise
    return revlist[-1], revlist[0]
def _query_metadata(repo_name, revision, name, description=None):
    """Return task metadata for *name*, computing the shared fields only once.

    The repo/push lookup is cached in the module-level METADATA dict, so
    repeated calls avoid re-querying the repository.
    """
    global METADATA
    if not METADATA:
        repo_url = query_repo_url(repo_name)
        push_info = query_revision_info(repo_url, revision)
        METADATA = {
            'description': description or 'Task graph generated via Mozilla CI tools',
            'owner': push_info['user'],
            'source': '%s/rev/%s' % (repo_url, revision),
        }
    # Merge the cached shared fields with this task's own name.
    return dict(METADATA, name=name)
def query_repo_url_from_buildername(buildername):
    """Return the full repository URL for a given known buildername."""
    return buildapi.query_repo_url(
        query_repo_name_from_buildername(buildername))
def test_query_repo_url_invalid(self, query_repository):
    """query_repo_url should raise an Exception when a repository not in the JSON file."""
    # Callable form of assertRaises, equivalent to the context-manager form.
    self.assertRaises(Exception, buildapi.query_repo_url, "not-a-repo")
def test_query_repo_url_valid(self, query_repository):
    """Test query_repo_url with a mock value for query_repository."""
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(buildapi.query_repo_url('repo1'),
                     "https://hg.mozilla.org/releases/repo1")
def test_query_repo_url_valid(self, query_repository):
    """Test query_repo_url with a mock value for query_repository."""
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(
        buildapi.query_repo_url('repo1'),
        "https://hg.mozilla.org/releases/repo1")
def query_repo_url(repo_name):
    """Return the full repository URL for a given known repo_name."""
    repo_url = buildapi.query_repo_url(repo_name)
    return repo_url
def trigger_range(buildername, revisions, times=1, dry_run=False, files=None,
                  extra_properties=None, trigger_build_if_missing=True):
    """Schedule the job named "buildername" ("times" times) in every revision on 'revisions'."""
    repo_name = query_repo_name_from_buildername(buildername)
    repo_url = buildapi.query_repo_url(repo_name)

    if revisions != []:
        LOG.info("We want to have %s job(s) of %s on revisions %s"
                 % (times, buildername, str(revisions)))

    for rev in revisions:
        LOG.info("")
        LOG.info("=== %s ===" % rev)
        if VALIDATE and not pushlog.valid_revision(repo_url, rev):
            LOG.info(
                "We can't trigger anything on pushes without a valid revision."
            )
            continue

        LOG.info("We want to have %s job(s) of %s on revision %s"
                 % (times, buildername, rev))

        # 1) How many potentially completed jobs can we get for this buildername?
        matching_jobs = QUERY_SOURCE.get_matching_jobs(repo_name, rev, buildername)
        successful_jobs, pending_jobs, running_jobs, _, failed_jobs = \
            _status_summary(matching_jobs)
        potential_jobs = pending_jobs + running_jobs + successful_jobs + failed_jobs

        # TODO: change this debug message when we have a less hardcoded _status_summary
        LOG.debug("We found %d pending/running jobs, %d successful jobs and "
                  "%d failed jobs"
                  % (pending_jobs + running_jobs, successful_jobs, failed_jobs))

        if potential_jobs >= times:
            LOG.info(
                "We have %d job(s) for '%s' which is enough for the %d job(s) we want."
                % (potential_jobs, buildername, times))
            continue

        # 2) If we have less potential jobs than 'times' instances then
        # we need to fill it in.
        LOG.info("We have found %d potential job(s) matching '%s' on %s. "
                 "We need to trigger more."
                 % (potential_jobs, buildername, rev))

        # If a job matching what we want already exists, we can
        # use the retrigger API in self-serve to retrigger that
        # instead of creating a new arbitrary job
        if len(matching_jobs) > 0 and files is None:
            request_id = QUERY_SOURCE.get_buildapi_request_id(
                repo_name, matching_jobs[0])
            buildapi.make_retrigger_request(repo_name,
                                            request_id,
                                            count=(times - potential_jobs),
                                            dry_run=dry_run)
        # If no matching job exists, we have to trigger a new arbitrary job
        else:
            list_of_requests = trigger_job(
                revision=rev,
                buildername=buildername,
                times=(times - potential_jobs),
                dry_run=dry_run,
                files=files,
                extra_properties=extra_properties,
                trigger_build_if_missing=trigger_build_if_missing)

            if list_of_requests and any(req.status_code != 202
                                        for req in list_of_requests):
                LOG.warning("Not all requests succeeded.")
def trigger_range(buildername, revisions, times=1, dry_run=False, files=None,
                  extra_properties=None, trigger_build_if_missing=True):
    """Schedule the job named "buildername" ("times" times) in every revision on 'revisions'."""
    repo_name = query_repo_name_from_buildername(buildername)
    repo_url = buildapi.query_repo_url(repo_name)

    if revisions != []:
        LOG.info("We want to have %s job(s) of %s on revisions %s" %
                 (times, buildername, str(revisions)))

    for rev in revisions:
        LOG.info("")
        LOG.info("=== %s ===" % rev)
        if VALIDATE and not pushlog.valid_revision(repo_url, rev):
            LOG.info("We can't trigger anything on pushes without a valid revision.")
            continue

        LOG.info("We want to have %s job(s) of %s on revision %s" %
                 (times, buildername, rev))

        # 1) How many potentially completed jobs can we get for this buildername?
        matching_jobs = QUERY_SOURCE.get_matching_jobs(repo_name, rev, buildername)
        successful_jobs, pending_jobs, running_jobs, _, failed_jobs = \
            _status_summary(matching_jobs)
        potential_jobs = pending_jobs + running_jobs + successful_jobs + failed_jobs

        # TODO: change this debug message when we have a less hardcoded _status_summary
        LOG.debug("We found %d pending/running jobs, %d successful jobs and "
                  "%d failed jobs" % (pending_jobs + running_jobs,
                                      successful_jobs, failed_jobs))

        if potential_jobs >= times:
            LOG.info("We have %d job(s) for '%s' which is enough for the %d job(s) we want." %
                     (potential_jobs, buildername, times))
        else:
            # 2) If we have less potential jobs than 'times' instances then
            # we need to fill it in.
            LOG.info("We have found %d potential job(s) matching '%s' on %s. "
                     "We need to trigger more." %
                     (potential_jobs, buildername, rev))

            # If a job matching what we want already exists, we can
            # use the retrigger API in self-serve to retrigger that
            # instead of creating a new arbitrary job
            if len(matching_jobs) > 0 and files is None:
                request_id = QUERY_SOURCE.get_buildapi_request_id(repo_name,
                                                                  matching_jobs[0])
                buildapi.make_retrigger_request(
                    repo_name,
                    request_id,
                    count=(times - potential_jobs),
                    dry_run=dry_run)
            # If no matching job exists, we have to trigger a new arbitrary job
            else:
                list_of_requests = trigger_job(
                    revision=rev,
                    buildername=buildername,
                    times=(times - potential_jobs),
                    dry_run=dry_run,
                    files=files,
                    extra_properties=extra_properties,
                    trigger_build_if_missing=trigger_build_if_missing)

                if list_of_requests and any(req.status_code != 202
                                            for req in list_of_requests):
                    LOG.warning("Not all requests succeeded.")
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    We return a list of all requests made.
    """
    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = buildapi.query_repo_url(repo_name)
    if VALIDATE and not pushlog.valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info("===> We want to trigger '%s' on revision '%s' a total of %d time(s)."
             % (buildername, revision, times))
    LOG.info("")  # Extra line to help visual of logs

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        # Explicit file list: trigger exactly the requested builder.
        builder_to_trigger = buildername
        _all_urls_reachable(files)
    else:
        builder_to_trigger, files = determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing
        )

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info("We won't trigger '%s' because there is no working build."
                         % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times."
                     % (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(builder_to_trigger, revision, files, dry_run, extra_properties)
        else:
            for _ in range(times):
                req = trigger(builder_to_trigger, revision, files, dry_run,
                              extra_properties)
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    We return a list of all requests made.
    """
    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = buildapi.query_repo_url(repo_name)
    if VALIDATE and not pushlog.valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info(
        "===> We want to trigger '%s' on revision '%s' a total of %d time(s)."
        % (buildername, revision, times))
    LOG.info("")  # Extra line to help visual of logs

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        # An explicit file list means the requested builder is used as-is.
        builder_to_trigger = buildername
        _all_urls_reachable(files)
    else:
        builder_to_trigger, files = _determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing)

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info(
                    "We won't trigger '%s' because there is no working build."
                    % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times."
                     % (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(builder_to_trigger, revision, files, dry_run, extra_properties)
        else:
            for _ in range(times):
                req = trigger(builder_to_trigger, revision, files, dry_run,
                              extra_properties)
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def main():
    """Entry point: trigger jobs in one of three modes.

    Mode 1 (--coalesced): retrigger every coalesced job on the revision.
    Mode 2 (--fill-revision): trigger all missing jobs for the revision.
    Mode 3 (default): trigger each requested builder over a computed
    revision list.
    """
    options = parse_args()
    validate_options(options)
    if not valid_credentials():
        sys.exit(-1)

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        options.repo_name = query_repo_name_from_buildername(options.buildernames[0])

    if options.rev == 'tip':
        repo_url = query_repo_url(options.repo_name)
        options.rev = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo_name, options.rev)

    # Mode 1: Trigger coalesced jobs
    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(options.repo_name,
                                                        options.rev, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=options.repo_name,
                                   request_id=request_id,
                                   dry_run=options.dry_run)
        return

    # Mode #2: Fill-in a revision
    if options.fill_revision:
        trigger_missing_jobs_for_revision(
            repo_name=options.repo_name,
            revision=options.rev,
            dry_run=options.dry_run
        )
        return

    # Mode #3: Trigger jobs based on revision list modifiers
    for buildername in options.buildernames:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=options.rev,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        try:
            trigger_range(
                buildername=buildername,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=options.trigger_build_if_missing
            )
        except Exception as e:
            # FIX: replaced the legacy Python 2 `except Exception, e` comma
            # syntax with `except ... as`, valid on Python 2.6+ and 3.
            LOG.exception(e)
            exit(1)

        if revlist:
            LOG.info('https://treeherder.mozilla.org/#/jobs?%s'
                     % urllib.urlencode({'repo': options.repo_name,
                                         'fromchange': revlist[-1],
                                         'tochange': revlist[0],
                                         'filter-searchStr': buildername}))
def _create_task(buildername, repo_name, revision, task_graph_id=None,
                 parent_task_id=None, requires=None, properties=None):
    """Return taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum
    amount of information required for the buildbot-bridge
    to consider it valid.
    You can establish a list dependencies to other tasks through
    the requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :param properties: Extra buildbot properties merged over the defaults.
    :type properties: dict
    :returns: TaskCluster graph
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s." % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    # FIX: the default was a mutable `properties={}`, a shared-object
    # anti-pattern; a fresh dict is now substituted per call.
    if properties is None:
        properties = {}

    # Needed because of bug 1195751
    all_properties = {
        'product': builder_info['product'],
        'who': push_info['user'],
    }
    all_properties.update(properties)

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': revision
            },
            'properties': all_properties,
        },
        metadata_name=buildername
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
def main():
    """Entry point: trigger coalesced jobs, or builders over a revision list.

    NOTE(review): the return value of valid_credentials() is discarded here,
    unlike the sibling main() which exits on failure — confirm whether that
    is intentional before changing it.
    """
    options = parse_args()
    validate_options(options)
    valid_credentials()

    if options.debug:
        LOG = setup_logging(logging.DEBUG)
    else:
        LOG = setup_logging(logging.INFO)

    # Setting the QUERY_SOURCE global variable in mozci.py
    set_query_source(options.query_source)

    if options.buildernames:
        options.buildernames = sanitize_buildernames(options.buildernames)
        repo_url = query_repo_url_from_buildername(options.buildernames[0])

    if not options.repo_name:
        options.repo_name = query_repo_name_from_buildername(options.buildernames[0])

    if options.rev == 'tip':
        repo_url = query_repo_url(options.repo_name)
        options.rev = query_repo_tip(repo_url)
        LOG.info("The tip of %s is %s", options.repo_name, options.rev)

    if options.coalesced:
        query_api = BuildApi()
        request_ids = query_api.find_all_jobs_by_status(options.repo_name,
                                                        options.rev, COALESCED)
        if len(request_ids) == 0:
            LOG.info('We did not find any coalesced job')
        for request_id in request_ids:
            make_retrigger_request(repo_name=options.repo_name,
                                   request_id=request_id,
                                   dry_run=options.dry_run)
        return

    for buildername in options.buildernames:
        revlist = determine_revlist(
            repo_url=repo_url,
            buildername=buildername,
            rev=options.rev,
            back_revisions=options.back_revisions,
            delta=options.delta,
            from_rev=options.from_rev,
            backfill=options.backfill,
            skips=options.skips,
            max_revisions=options.max_revisions)

        try:
            trigger_range(
                buildername=buildername,
                revisions=revlist,
                times=options.times,
                dry_run=options.dry_run,
                files=options.files,
                trigger_build_if_missing=options.trigger_build_if_missing
            )
        except Exception as e:
            # FIX: replaced the legacy Python 2 `except Exception, e` comma
            # syntax with `except ... as`, valid on Python 2.6+ and 3.
            LOG.exception(e)
            exit(1)

        if revlist:
            LOG.info('https://treeherder.mozilla.org/#/jobs?%s'
                     % urllib.urlencode({'repo': options.repo_name,
                                         'fromchange': revlist[-1],
                                         'tochange': revlist[0],
                                         'filter-searchStr': buildername}))
def _create_task(buildername, repo_name, revision, task_graph_id=None,
                 parent_task_id=None, requires=None):
    """Return taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum
    amount of information required for the buildbot-bridge
    to consider it valid.
    You can establish a list dependencies to other tasks through
    the requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :returns: TaskCluster graph
    :rtype: dict

    """
    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError("The builder '%s' should be for repo: %s."
                         % (buildername, repo_name))

    repo_url = query_repo_url(repo_name)
    push_info = query_revision_info(repo_url, revision)

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': revision
            },
            # Needed because of bug 1195751
            'properties': {
                'product': builder_info['product'],
                'who': push_info['user']
            }
        },
        metadata_name=buildername)

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task