def test_extra_builder_properties(test_job, nightly):
    """Testing the get_builder_extra_properties function for correct buildid.

    Nightly builds must carry a 'buildid' property that is a fresh
    "%Y%m%d%H%M%S" timestamp; non-nightly builds must not carry one.
    """
    extra_properties = get_builder_extra_properties(test_job)
    if nightly is True:
        assert 'buildid' in extra_properties, "buildid is needed for nightly builds"
        # Parse the buildid back into epoch seconds before comparing.
        # Subtracting the raw "%Y%m%d%H%M%S" integers is NOT a difference in
        # seconds (e.g. ...120100 - ...120059 == 41), so the old integer
        # comparison failed spuriously across minute/hour boundaries.
        limit = 5  # maximum allowed age, in seconds
        timestamp_obtained = time.mktime(
            time.strptime(extra_properties['buildid'], "%Y%m%d%H%M%S"))
        assert time.time() - timestamp_obtained < limit, \
            "buildid should be a recent timestamp"
    else:
        assert 'buildid' not in extra_properties, \
            "Non nightly builds need not have buildid"
def _create_task(buildername, repo_name, revision, metadata=None,
                 task_graph_id=None, parent_task_id=None, requires=None,
                 properties=None, *args, **kwargs):
    """Return taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the
    requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Metadata for the task. If not specified, generate it.
    :type metadata: json
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a
        dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :param properties: Extra buildbot properties merged into the task payload.
    :type properties: dict

    :returns: TaskCluster graph
    :rtype: dict

    Extra positional/keyword arguments are accepted and ignored so callers
    can pass a superset of options.
    """
    # Use None as the default instead of a shared mutable dict
    # (mutable default arguments persist across calls).
    if properties is None:
        properties = {}

    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError(
            "The builder '%s' should be for repo: %s."
            % (buildername, repo_name)
        )

    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)
    # Needed because of bug 1195751
    full_revision = str(push_info.changesets[0].node)

    all_properties = {
        'product': builder_info['product'],
        'who': push_info.user,
    }
    all_properties.update(properties)
    all_properties.update(get_builder_extra_properties(buildername))

    metadata = metadata if metadata is not None else \
        generate_metadata(repo_name=repo_name,
                          revision=revision,
                          name=buildername)

    # The task's name is used in the task-graph-inspector to list all tasks
    # and using the buildername makes it easy for a person to recognize each job.
    metadata['name'] = buildername

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': full_revision
            },
            'properties': all_properties,
        },
        metadata=metadata,
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties']['parent_task_id'] = parent_task_id

    return task
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    :param revision: Changeset ID of the push to trigger against.
    :type revision: str
    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param times: How many times to trigger the job.
    :type times: int
    :param files: Known-good file URLs to trigger with; when given we skip
        determining the trigger objective.
    :type files: list
    :param dry_run: When True, only log what would be requested.
    :type dry_run: bool
    :param extra_properties: Extra buildbot properties for the request.
    :type extra_properties: dict
    :param trigger_build_if_missing: Trigger the upstream build if absent.
    :type trigger_build_if_missing: bool

    We return a list of all requests made.
    """
    # Copy the caller's dict: the previous code mutated it in place via
    # update(), and a mutable default argument would be shared across calls.
    extra_properties = dict(extra_properties or {})
    extra_properties.update(get_builder_extra_properties(buildername))

    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = repositories.query_repo_url(repo_name)

    if len(revision) != 40:
        LOG.info('We are going to convert the revision into 40 chars ({}).'.format(revision))
        push_info = query_push_by_revision(repo_url, revision)
        revision = push_info.changesets[0].node
        assert len(revision) == 40, 'This should have been a 40 char revision.'

    if VALIDATE and not valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info("==> We want to trigger '%s' a total of %d time(s)." %
             (buildername, times))

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        builder_to_trigger = buildername
        _all_urls_reachable(files)
        # Trigger with the caller-provided files; package_url/test_url are
        # only determined in the else branch (referencing them here used to
        # raise NameError).
        trigger_files = files
    else:
        builder_to_trigger, package_url, test_url = determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing,
            will_use_buildapi=True
        )
        trigger_files = [package_url, test_url]

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info("We won't trigger '%s' because there is no working build."
                         % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times." %
                     (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(
                builder=builder_to_trigger,
                revision=revision,
                files=trigger_files,
                dry_run=dry_run,
                extra_properties=extra_properties
            )
        else:
            for _ in range(times):
                req = trigger(
                    builder=builder_to_trigger,
                    revision=revision,
                    files=trigger_files,
                    dry_run=dry_run,
                    extra_properties=extra_properties
                )
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def trigger_job(revision, buildername, times=1, files=None, dry_run=False,
                extra_properties=None, trigger_build_if_missing=True):
    """Trigger a job through self-serve.

    :param revision: Changeset ID of the push to trigger against.
    :type revision: str
    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param times: How many times to trigger the job.
    :type times: int
    :param files: Known-good file URLs to trigger with; when given we skip
        determining the trigger objective.
    :type files: list
    :param dry_run: When True, only log what would be requested.
    :type dry_run: bool
    :param extra_properties: Extra buildbot properties for the request.
    :type extra_properties: dict
    :param trigger_build_if_missing: Trigger the upstream build if absent.
    :type trigger_build_if_missing: bool

    We return a list of all requests made.
    """
    # Copy the caller's dict: the previous code mutated it in place via
    # update(), and a mutable default argument would be shared across calls.
    extra_properties = dict(extra_properties or {})
    extra_properties.update(get_builder_extra_properties(buildername))

    repo_name = query_repo_name_from_buildername(buildername)
    builder_to_trigger = None
    list_of_requests = []
    repo_url = repositories.query_repo_url(repo_name)

    if len(revision) != 40:
        LOG.warning(
            'We should not be using revisions less than 40 chars ({}).'.format(
                revision))
        push_info = query_push_by_revision(repo_url, revision)
        revision = push_info.changesets[0].node
        assert len(revision) == 40, 'This should have been a 40 char revision.'

    if VALIDATE and not valid_revision(repo_url, revision):
        return list_of_requests

    LOG.info("==> We want to trigger '%s' a total of %d time(s)." %
             (buildername, times))
    LOG.info("")  # Extra line to help visual of logs

    if VALIDATE and not valid_builder(buildername):
        LOG.error("The builder %s requested is invalid" % buildername)
        # XXX How should we exit cleanly?
        exit(-1)

    if files:
        builder_to_trigger = buildername
        _all_urls_reachable(files)
        # Trigger with the caller-provided files; package_url/test_url are
        # only determined in the else branch (referencing them here used to
        # raise NameError).
        trigger_files = files
    else:
        builder_to_trigger, package_url, test_url = determine_trigger_objective(
            revision=revision,
            buildername=buildername,
            trigger_build_if_missing=trigger_build_if_missing,
            will_use_buildapi=True)
        trigger_files = [package_url, test_url]

        if builder_to_trigger != buildername and times != 1:
            # The user wants to trigger a downstream job,
            # however, we need a build job instead.
            # We should trigger the downstream job multiple times, however,
            # we only trigger the upstream jobs once.
            LOG.debug("Since we need to trigger a build job we don't need to "
                      "trigger it %s times but only once." % times)
            if trigger_build_if_missing:
                LOG.info("In order to trigger %s %i times, "
                         "please run the script again after %s ends."
                         % (buildername, times, builder_to_trigger))
            else:
                LOG.info(
                    "We won't trigger '%s' because there is no working build."
                    % buildername)
                LOG.info("")
            times = 1

    if builder_to_trigger:
        if dry_run:
            LOG.info("Dry-run: We were going to request '%s' %s times." %
                     (builder_to_trigger, times))
            # Running with dry_run being True will only output information
            trigger(builder=builder_to_trigger,
                    revision=revision,
                    files=trigger_files,
                    dry_run=dry_run,
                    extra_properties=extra_properties)
        else:
            for _ in range(times):
                req = trigger(builder=builder_to_trigger,
                              revision=revision,
                              files=trigger_files,
                              dry_run=dry_run,
                              extra_properties=extra_properties)
                if req is not None:
                    list_of_requests.append(req)
    else:
        LOG.debug("Nothing needs to be triggered")

    # Cleanup old buildjson files.
    clean_directory()

    return list_of_requests
def _create_task(buildername, repo_name, revision, metadata=None,
                 task_graph_id=None, parent_task_id=None, requires=None,
                 properties=None, *args, **kwargs):
    """Return taskcluster task to trigger a buildbot builder.

    This function creates a generic task with the minimum amount of
    information required for the buildbot-bridge to consider it valid.
    You can establish a list dependencies to other tasks through the
    requires field.

    :param buildername: The name of a buildbot builder.
    :type buildername: str
    :param repo_name: The name of a repository e.g. mozilla-inbound
    :type repo_name: str
    :param revision: Changeset ID of a revision.
    :type revision: str
    :param metadata: Metadata for the task. If not specified, generate it.
    :type metadata: json
    :param task_graph_id: TC graph id to which this task belongs to
    :type task_graph_id: str
    :param parent_task_id: Task from which to find artifacts. It is not a
        dependency.
    :type parent_task_id: str
    :param requires: List of taskIds of other tasks which this task depends on.
    :type requires: list
    :param properties: Extra buildbot properties merged into the task payload.
    :type properties: dict

    :returns: TaskCluster graph
    :rtype: dict

    Extra positional/keyword arguments are accepted and ignored so callers
    can pass a superset of options.
    """
    # Use None as the default instead of a shared mutable dict
    # (mutable default arguments persist across calls).
    if properties is None:
        properties = {}

    if not valid_builder(buildername):
        raise MozciError("The builder '%s' is not a valid one." % buildername)

    builder_info = get_buildername_metadata(buildername)
    if builder_info['repo_name'] != repo_name:
        raise MozciError("The builder '%s' should be for repo: %s."
                         % (buildername, repo_name))

    repo_url = query_repo_url(repo_name)
    push_info = query_push_by_revision(repo_url=repo_url, revision=revision)
    # Needed because of bug 1195751
    full_revision = str(push_info.changesets[0].node)

    all_properties = {
        'product': builder_info['product'],
        'who': push_info.user,
    }
    all_properties.update(properties)
    all_properties.update(get_builder_extra_properties(buildername))

    metadata = metadata if metadata is not None else \
        generate_metadata(repo_name=repo_name,
                          revision=revision,
                          name=buildername)

    # The task's name is used in the task-graph-inspector to list all tasks
    # and using the buildername makes it easy for a person to recognize each job.
    metadata['name'] = buildername

    # XXX: We should validate that the parent task is a valid parent platform
    # e.g. do not schedule Windows tests against Linux builds
    task = create_task(
        repo_name=repo_name,
        revision=revision,
        taskGroupId=task_graph_id,
        workerType='buildbot-bridge',
        provisionerId='buildbot-bridge',
        payload={
            'buildername': buildername,
            'sourcestamp': {
                'branch': repo_name,
                'revision': full_revision
            },
            'properties': all_properties,
        },
        metadata=metadata,
    )

    if requires:
        task['requires'] = requires

    # Setting a parent_task_id as a property allows Mozharness to
    # determine the artifacts we need for this job to run properly
    if parent_task_id:
        task['task']['payload']['properties'][
            'parent_task_id'] = parent_task_id

    return task