def find_new_builds(master_url, root_json, build_db):
  """Given a dict of previously-seen builds, find new builds on each builder.

  Note that we use the 'cachedBuilds' here since it should be faster, and this
  script is meant to be run frequently enough that it shouldn't skip any
  builds.

  'Frequently enough' means 1 minute in the case of Buildbot or cron, so the
  only way for the scan to be overwhelmed is if > cachedBuilds builds complete
  within 1 minute. As cachedBuilds is scaled per number of slaves per builder,
  the only way for this to really happen is if a build consistently takes < 1
  minute to complete.

  Args:
    master_url: key identifying the master in build_db.masters.
    root_json: master JSON dict; root_json['builders'] maps builder name to a
        dict with 'cachedBuilds' and 'currentBuilds' build-number lists.
    build_db: database object with a .masters dict of
        {master_url: {builder: {buildnum: build record}}}; records expose a
        .finished attribute. Mutated in place (master entry is created, and a
        sentinel finished build may be recorded for brand-new builders).

  Returns:
    Dict mapping builder name to the collection of build numbers to scan.
  """
  new_builds = {}
  build_db.masters[master_url] = build_db.masters.get(master_url, {})

  # Highest finished build number we have already recorded per builder.
  # NOTE: .iteritems()/.iteritems() replaced with .items() — identical
  # iteration on Python 2, and also works under Python 3.
  last_finished_build = {}
  for builder, builds in build_db.masters[master_url].items():
    finished = [int(y[0]) for y in builds.items() if y[1].finished]
    if finished:
      last_finished_build[builder] = max(finished)

  for buildername, builder in root_json['builders'].items():
    # cachedBuilds are the builds in the cache, while currentBuilds are the
    # currently running builds. Thus cachedBuilds can be unfinished or
    # finished, while currentBuilds are always unfinished.
    candidate_builds = set(builder['cachedBuilds'] + builder['currentBuilds'])
    if buildername in last_finished_build:
      # Only builds newer than the last finished one need scanning.
      new_builds[buildername] = [
          buildnum for buildnum in candidate_builds
          if buildnum > last_finished_build[buildername]]
    else:
      if buildername in build_db.masters[master_url]:
        # We've seen this builder before, but haven't seen a finished build.
        # Scan finished builds as well as unfinished.
        new_builds[buildername] = candidate_builds
      else:
        # We've never seen this builder before, only scan unfinished builds.
        # We're explicitly only dealing with current builds since we haven't
        # seen this builder before. Thus, the next time a scan is run, only
        # unfinished builds will be in the build_db. This immediately drops us
        # into the section above (builder is in the db, but no finished builds
        # yet.) In this state all the finished builds will be loaded in,
        # firing off an email storm any time the build_db changes or a new
        # builder is added. We set the last finished build here to prevent
        # that.
        finished = set(builder['cachedBuilds']) - set(builder['currentBuilds'])
        if finished:
          build_db.masters[master_url].setdefault(buildername, {})[
              max(finished)] = build_scan_db.gen_build(finished=True)
        new_builds[buildername] = builder['currentBuilds']

  return new_builds
def propagate_build_json_to_db(build_db, builds):
  """Propagates build status changes from build_json to build_db.

  Each entry in builds is a (build_json, master, builder, buildnum) tuple.
  Missing build records are created; any build whose JSON carries a
  'results' value (even 0, i.e. success) is marked finished.
  """
  for status_json, master, builder, buildnum in builds:
    builder_builds = build_db.masters[master].setdefault(builder, {})
    record = builder_builds.get(buildnum)
    if not record:
      # First time we've seen this build: start from a fresh record.
      record = build_scan_db.gen_build()
    if status_json.get('results') is not None:
      # 'results' is only present once the build has completed.
      record = record._replace(finished=True)  # pylint: disable=W0212
    build_db.masters[master][builder][buildnum] = record
def simulate_build_failure(build_db, master, builder, *steps):
  """Fabricates master/build JSON and a build_db entry for one failed build.

  Used to inject a synthetic FAILURE build (number 0) on the given builder,
  with one finished step per name in *steps. build_db is mutated to record
  build 0 as finished.

  Returns:
    ({master: master_json}, (build_tuple,)) matching the shapes produced by a
    real master scan.
  """
  master_json = {
      'project': {
          'buildbotURL': master,
          'title': 'Simulated Master',
      },
      'builders': [builder],
  }

  simulated_steps = [{
      'name': step_name,
      'isFinished': True,
      'text': [
          'Simulated Build Step',
      ],
      'logs': [],
  } for step_name in steps]

  build_json = (
      {
          'builderName': builder,
          'number': 0,
          'steps': simulated_steps,
          'results': FAILURE,
          'reason': 'simulation',
          'blame': ['you'],
      },
      master,
      builder,
      0,
  )

  # Record the simulated build as already finished in the database.
  build_db.masters.setdefault(master, {}).setdefault(builder, {})[0] = (
      build_scan_db.gen_build(finished=True))

  return {master: master_json}, (build_json, )