def on_buildbot_event(data, message, dry_run, stage=False):
    """Act upon buildbot events."""
    # Pulse gives us a job_id and a job_guid, we need request_id.
    LOG.info("%s action requested by %s on repo_name %s with job_id: %s" %
             (data["action"], data["requester"], data["project"],
              data["job_id"]))
    # Cleaning mozci caches
    buildjson.BUILDS_CACHE = {}
    query_jobs.JOBS_CACHE = {}

    if stage:
        treeherder_client = TreeherderClient(host="treeherder.allizom.org")
    else:
        treeherder_client = TreeherderClient()

    repo_name = data["project"]
    job_id = data["job_id"]
    result = treeherder_client.get_jobs(repo_name, id=job_id)

    # If no result was found, ignore the message.
    if not result:
        LOG.info("We could not find any result for repo_name: %s and "
                 "job_id: %s" % (repo_name, job_id))
        message.ack()
        return

    result = result[0]
    buildername = result["ref_data_name"]
    resultset_id = result["result_set_id"]
    result_sets = treeherder_client.get_resultsets(repo_name, id=resultset_id)
    revision = result_sets[0]["revision"]
    action = data["action"]
    status = None

    # Keep the original name for the status message, since filtering
    # may return None.
    original_buildername = buildername
    buildername = filter_invalid_builders(buildername)

    # Treeherder can send us invalid builder names
    # https://bugzilla.mozilla.org/show_bug.cgi?id=1242038
    if buildername is None:
        status = "Builder %s was invalid." % original_buildername

    # Backfill action
    elif action == "backfill":
        manual_backfill(
            revision,
            buildername,
            max_revisions=get_maxRevisions(buildername),
            dry_run=dry_run
        )
        if not dry_run:
            status = "Backfill request sent"
        else:
            status = "Dry-run mode, nothing was backfilled"

    # Send a pulse message showing what we did
    message_sender = MessageHandler()
    pulse_message = {
        "job_id": job_id,
        "action": action,
        "requester": data["requester"],
        "status": status
    }
    routing_key = "{}.{}".format(repo_name, action)
    try:
        message_sender.publish_message(pulse_message, routing_key)
    except Exception:
        LOG.warning("Failed to publish message over pulse stream.")

    if not dry_run:
        # We need to ack the message to remove it from our queue
        message.ack()
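
# A hedged usage sketch, not part of the original handlers: it builds a Pulse
# payload with the keys on_buildbot_event() reads above and invokes the
# handler in dry-run mode. The concrete values, the _StubMessage helper and
# the _example_* function names are hypothetical placeholders, and the call
# still needs Treeherder access to resolve the job.
class _StubMessage(object):
    """Minimal stand-in for a Pulse message; only ack() is needed here."""

    def ack(self):
        LOG.info("stub message acknowledged")


def _example_buildbot_event_dry_run():
    sample_data = {
        "action": "backfill",               # the only action handled above
        "requester": "nobody@example.com",  # placeholder
        "project": "mozilla-inbound",       # placeholder repo_name
        "job_id": 1,                        # placeholder Treeherder job id
    }
    on_buildbot_event(sample_data, _StubMessage(), dry_run=True)
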
def on_event(data, message, dry_run):
    """Automatically backfill failed jobs."""
    # Cleaning mozci caches
    buildjson.BUILDS_CACHE = {}
    query_jobs.JOBS_CACHE = {}

    payload = data["payload"]
    status = payload["status"]
    buildername = payload["buildername"]

    # Backfill a failed job
    if status in [FAILURE, WARNING]:
        buildername = filter_invalid_builders(buildername)

        # Treeherder can send us invalid builder names
        # https://bugzilla.mozilla.org/show_bug.cgi?id=1242038
        if buildername is None:
            if not dry_run:
                # We need to ack the message to remove it from our queue
                message.ack()
            return

        revision = payload["revision"]
        LOG.info("**")  # visual separator
        LOG.info("Failed job found at revision %s. Buildername: %s",
                 revision, buildername)

        try:
            # We want to ensure one appearance of the job on every revision
            revlist = find_backfill_revlist(
                revision=revision,
                max_revisions=get_maxRevisions(buildername),
                buildername=buildername
            )

            trigger_range(
                buildername=buildername,
                revisions=revlist[1:],
                times=1,
                dry_run=dry_run,
                trigger_build_if_missing=False
            )

            if not dry_run:
                # We need to ack the message to remove it from our queue
                message.ack()

        except ConnectionError:
            # The message has not been acked, so we will try again
            LOG.warning("Connection error. Trying again")
        except PushlogError as e:
            # Unable to retrieve pushlog data. Please check repo_url and the
            # revision specified.
            LOG.warning(str(e))
        except Exception as e:
            # The message has not been acked, so we will try again
            LOG.warning(str(e))
            raise
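
# A hedged usage sketch for on_event(), illustration only: a hand-built
# normalized payload with the keys the handler reads above, passed through in
# dry-run mode so nothing is triggered or acked. The builder name and revision
# are hypothetical placeholders, the function name is an assumption, and
# _StubMessage is the illustrative helper defined earlier; finding the
# backfill revlist still requires pushlog access.
def _example_on_event_dry_run():
    sample_data = {
        "payload": {
            "status": FAILURE,  # buildbot result code for a failed job
            "buildername": "Ubuntu VM 12.04 x64 mozilla-inbound opt test mochitest-1",  # placeholder
            "revision": "0123456789abcdef0123456789abcdef01234567",  # placeholder
        }
    }
    on_event(sample_data, _StubMessage(), dry_run=True)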