def main():
  options, args = get_options()

  logging.basicConfig(
      level=logging.DEBUG if options.verbose else logging.INFO)

  masters = {}
  for m in set(args):
    masters[m] = BUILDER_WILDCARD

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, {}, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  _, build_jsons = get_updated_builds(
      masters, build_db, options.parallelism, options.milo_creds)

  for _, master_url, builder, buildnum in build_jsons:
    print '%s:%s:%s' % (master_url, builder, buildnum)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, {}, options.build_db)

  return 0

def main():
  options, args = get_options()

  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.INFO)

  masters = set(args)

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, {}, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  _, build_jsons = get_updated_builds(
      masters, build_db, options.parallelism)

  for _, master_url, builder, buildnum in build_jsons:
    print '%s:%s:%s' % (master_url, builder, buildnum)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, {}, options.build_db)

  return 0

def main(argv):
  args = get_args(argv)

  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

  gatekeeper_config = gatekeeper_ng_config.load_gatekeeper_config(args.json)

  if args.verify:
    return 0

  simulate = bool(args.simulate_master)

  if args.flatten_json:
    if not args.no_hashes:
      gatekeeper_config = gatekeeper_ng_config.inject_hashes(
          gatekeeper_config)
    gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
    print
    return 0

  if args.set_status and not simulate:
    args.password = get_pwd(args.password_file)

  masters = defaultdict(set)
  for m in args.master_url:
    if m.count(':') > 1:
      # Master in master_url:builder,builder format.
      http, mname, builderlist = m.split(':', 2)
      mastername = ':'.join([http, mname])
      masters[mastername].update(builderlist.split(','))
    else:
      # Regular master URL, just add '*'.
      masters[m].add(build_scan.BUILDER_WILDCARD)

  if not set(masters) <= set(gatekeeper_config):
    print 'The following masters are not present in the gatekeeper config:'
    for m in set(masters) - set(gatekeeper_config):
      print ' ' + m
    return 1

  emoji = []
  if args.emoji != 'None':
    try:
      with open(args.emoji) as f:
        emoji = json.load(f)
    except (IOError, ValueError) as e:
      logging.warning('Could not load emoji file %s: %s', args.emoji, e)

  if args.clear_build_db:
    build_db = build_scan_db.gen_db()
    build_scan_db.save_build_db(build_db, gatekeeper_config, args.build_db)
  else:
    build_db = build_scan_db.get_build_db(args.build_db)

  if not simulate:
    master_jsons, build_jsons = build_scan.get_updated_builds(
        masters, build_db, args.parallelism, args.milo_creds)
  else:
    master_jsons, build_jsons = simulate_build_failure(
        build_db, args.simulate_master, args.simulate_builder,
        *args.simulate_step)

  if args.sync_build_db:
    build_scan_db.save_build_db(build_db, gatekeeper_config, args.build_db)
    return 0

  (failure_tuples, success_tuples, successful_builder_steps,
   current_builds_successful) = check_builds(
       build_jsons, master_jsons, gatekeeper_config)

  # Write failure / success information back to the build_db.
  propagate_build_status_back_to_db(failure_tuples, success_tuples, build_db)

  # opening is an option, mostly to keep the unittests working which
  # assume that any setting of status is negative.
  if args.open_tree:
    open_tree_if_possible(build_db, master_jsons, successful_builder_steps,
                          current_builds_successful, args.status_user,
                          args.password, args.status_url, args.set_status,
                          emoji, simulate)

  # debounce_failures does 3 things:
  # 1. Groups logging by builder
  # 2. Selects out the "build" part from the failure tuple.
  # 3. Rejects builds we've already warned about (and logs).
  new_failures = debounce_failures(failure_tuples, current_builds_successful,
                                   build_db)

  if args.track_revisions:
    # Only close the tree if it's a newer revision than before.
    properties = args.revision_properties.split(',')
    triggered_revisions = build_db.aux.get('triggered_revisions', {})
    if not triggered_revisions or (
        sorted(triggered_revisions) != sorted(properties)):
      logging.info(
          'revision properties have changed from %s to %s. '
          'clearing previous data.', triggered_revisions, properties)
      build_db.aux['triggered_revisions'] = dict.fromkeys(properties)
    new_failures = reject_old_revisions(new_failures, build_db)

  close_tree_if_necessary(build_db, new_failures, args.status_user,
                          args.password, args.status_url, args.set_status,
                          args.revision_properties.split(','), simulate)

  try:
    notify_failures(new_failures, args.sheriff_url, args.default_from_email,
                    args.email_app_url, args.email_app_secret,
                    args.email_domain, args.filter_domain,
                    args.disable_domain_filter, simulate)
  finally:
    if not args.skip_build_db_update and not simulate:
      build_scan_db.save_build_db(build_db, gatekeeper_config, args.build_db)

  return 0

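# Illustrative sketch, not part of the original script: how the
# 'master_url:builder,builder' form handled above is expected to split.
# The URL and builder names below are made-up example values.
def _demo_master_spec_split():
  spec = 'https://build.example.com/p/chromium:builder_a,builder_b'
  assert spec.count(':') > 1
  http, mname, builderlist = spec.split(':', 2)
  mastername = ':'.join([http, mname])
  # mastername == 'https://build.example.com/p/chromium'
  # builders == ['builder_a', 'builder_b']
  builders = builderlist.split(',')
  return mastername, builders
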
def main():
  options, args = get_options()

  logging.basicConfig(
      level=logging.DEBUG if options.verbose else logging.INFO)

  gatekeeper_config = gatekeeper_ng_config.load_gatekeeper_config(
      options.json)

  if options.verify:
    return 0

  if options.flatten_json:
    if not options.no_hashes:
      gatekeeper_config = gatekeeper_ng_config.inject_hashes(
          gatekeeper_config)
    gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
    print
    return 0

  if options.set_status:
    options.password = get_pwd(options.password_file)

  masters = set(args)
  if not masters <= set(gatekeeper_config):
    print 'The following masters are not present in the gatekeeper config:'
    for m in masters - set(gatekeeper_config):
      print ' ' + m
    return 1

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  master_jsons, build_jsons = build_scan.get_updated_builds(
      masters, build_db, options.parallelism)

  if options.sync_build_db:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
    return 0

  failure_tuples = check_builds(build_jsons, master_jsons, gatekeeper_config)

  # opening is an option, mostly to keep the unittests working which
  # assume that any setting of status is negative.
  if options.open_tree:
    # failures are actually tuples, we only care about the build part.
    failing_builds = [b[0] for b in failure_tuples]
    open_tree_if_possible(failing_builds, options.status_user,
                          options.password, options.status_url,
                          options.set_status)

  # debounce_failures does 3 things:
  # 1. Groups logging by builder
  # 2. Selects out the "build" part from the failure tuple.
  # 3. Rejects builds we've already warned about (and logs).
  new_failures = debounce_failures(failure_tuples, build_db)

  close_tree_if_necessary(new_failures, options.status_user, options.password,
                          options.status_url, options.set_status)
  notify_failures(new_failures, options.sheriff_url,
                  options.default_from_email, options.email_app_url,
                  options.email_app_secret, options.email_domain,
                  options.filter_domain, options.disable_domain_filter)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)

  return 0

def main():
  options, args = get_options()

  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.INFO)

  gatekeeper_config = gatekeeper_ng_config.load_gatekeeper_config(options.json)

  if options.verify:
    return 0

  if options.flatten_json:
    if not options.no_hashes:
      gatekeeper_config = gatekeeper_ng_config.inject_hashes(gatekeeper_config)
    gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
    print
    return 0

  if options.set_status:
    options.password = get_pwd(options.password_file)

  masters = set(args)
  if not masters <= set(gatekeeper_config):
    print 'The following masters are not present in the gatekeeper config:'
    for m in masters - set(gatekeeper_config):
      print ' ' + m
    return 1

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  master_jsons, build_jsons = build_scan.get_updated_builds(
      masters, build_db, options.parallelism)

  if options.sync_build_db:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
    return 0

  failure_tuples = check_builds(build_jsons, master_jsons, gatekeeper_config)

  # opening is an option, mostly to keep the unittests working which
  # assume that any setting of status is negative.
  if options.open_tree:
    # failures are actually tuples, we only care about the build part.
    failing_builds = [b[0] for b in failure_tuples]
    open_tree_if_possible(failing_builds, options.status_user,
                          options.password, options.status_url,
                          options.set_status)

  # debounce_failures does 3 things:
  # 1. Groups logging by builder
  # 2. Selects out the "build" part from the failure tuple.
  # 3. Rejects builds we've already warned about (and logs).
  new_failures = debounce_failures(failure_tuples, build_db)

  close_tree_if_necessary(new_failures, options.status_user, options.password,
                          options.status_url, options.set_status)
  notify_failures(new_failures, options.sheriff_url,
                  options.default_from_email, options.email_app_url,
                  options.email_app_secret, options.email_domain,
                  options.filter_domain, options.disable_domain_filter)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)

  return 0

def main():
  options, args = get_options()

  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.INFO)

  gatekeeper_config = gatekeeper_ng_config.load_gatekeeper_config(options.json)

  if options.verify:
    return 0

  if options.flatten_json:
    if not options.no_hashes:
      gatekeeper_config = gatekeeper_ng_config.inject_hashes(gatekeeper_config)
    gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
    print
    return 0

  if options.set_status:
    options.password = get_pwd(options.password_file)

  masters = set(args)
  if not masters <= set(gatekeeper_config):
    print 'The following masters are not present in the gatekeeper config:'
    for m in masters - set(gatekeeper_config):
      print ' ' + m
    return 1

  emoji = []
  if options.emoji != 'None':
    try:
      with open(options.emoji) as f:
        emoji = json.load(f)
    except (IOError, ValueError) as e:
      logging.warning('Could not load emoji file %s: %s', options.emoji, e)

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  master_jsons, build_jsons = build_scan.get_updated_builds(
      masters, build_db, options.parallelism)

  if options.sync_build_db:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
    return 0

  (failure_tuples, success_tuples, successful_builder_steps,
   current_builds_successful) = check_builds(
       build_jsons, master_jsons, gatekeeper_config)

  # Write failure / success information back to the build_db.
  propagate_build_status_back_to_db(failure_tuples, success_tuples, build_db)

  # opening is an option, mostly to keep the unittests working which
  # assume that any setting of status is negative.
  if options.open_tree:
    open_tree_if_possible(build_db, master_jsons, successful_builder_steps,
                          current_builds_successful, options.status_user,
                          options.password, options.status_url,
                          options.set_status, emoji)

  # debounce_failures does 3 things:
  # 1. Groups logging by builder
  # 2. Selects out the "build" part from the failure tuple.
  # 3. Rejects builds we've already warned about (and logs).
  new_failures = debounce_failures(failure_tuples, current_builds_successful,
                                   build_db)

  if options.track_revisions:
    # Only close the tree if it's a newer revision than before.
    properties = options.revision_properties.split(',')
    triggered_revisions = build_db.aux.get('triggered_revisions', {})
    if not triggered_revisions or (
        sorted(triggered_revisions) != sorted(properties)):
      logging.info('revision properties have changed from %s to %s. '
                   'clearing previous data.', triggered_revisions, properties)
      build_db.aux['triggered_revisions'] = dict.fromkeys(properties)
    new_failures = reject_old_revisions(new_failures, build_db)

  close_tree_if_necessary(new_failures, options.status_user, options.password,
                          options.status_url, options.set_status,
                          options.revision_properties.split(','))
  notify_failures(new_failures, options.sheriff_url,
                  options.default_from_email, options.email_app_url,
                  options.email_app_secret, options.email_domain,
                  options.filter_domain, options.disable_domain_filter)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)

  return 0

def main():
  options, args = get_options()

  logging.basicConfig(
      level=logging.DEBUG if options.verbose else logging.INFO)

  gatekeeper_config = gatekeeper_ng_config.load_gatekeeper_config(
      options.json)

  if options.verify:
    return 0

  if options.flatten_json:
    if not options.no_hashes:
      gatekeeper_config = gatekeeper_ng_config.inject_hashes(
          gatekeeper_config)
    gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
    print
    return 0

  if options.set_status:
    options.password = get_pwd(options.password_file)

  masters = set(args)
  if not masters <= set(gatekeeper_config):
    print 'The following masters are not present in the gatekeeper config:'
    for m in masters - set(gatekeeper_config):
      print ' ' + m
    return 1

  emoji = []
  if options.emoji != 'None':
    try:
      with open(options.emoji) as f:
        emoji = json.load(f)
    except (IOError, ValueError) as e:
      logging.warning('Could not load emoji file %s: %s', options.emoji, e)

  if options.clear_build_db:
    build_db = {}
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
  else:
    build_db = build_scan_db.get_build_db(options.build_db)

  master_jsons, build_jsons = build_scan.get_updated_builds(
      masters, build_db, options.parallelism)

  if options.sync_build_db:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)
    return 0

  (failure_tuples, success_tuples, successful_builder_steps,
   current_builds_successful) = check_builds(build_jsons, master_jsons,
                                             gatekeeper_config)

  # Write failure / success information back to the build_db.
  propagate_build_status_back_to_db(failure_tuples, success_tuples, build_db)

  # opening is an option, mostly to keep the unittests working which
  # assume that any setting of status is negative.
  if options.open_tree:
    open_tree_if_possible(build_db, master_jsons, successful_builder_steps,
                          current_builds_successful, options.status_user,
                          options.password, options.status_url,
                          options.set_status, emoji)

  # debounce_failures does 3 things:
  # 1. Groups logging by builder
  # 2. Selects out the "build" part from the failure tuple.
  # 3. Rejects builds we've already warned about (and logs).
  new_failures = debounce_failures(failure_tuples, current_builds_successful,
                                   build_db)

  if options.track_revisions:
    # Only close the tree if it's a newer revision than before.
    properties = options.revision_properties.split(',')
    triggered_revisions = build_db.aux.get('triggered_revisions', {})
    if not triggered_revisions or (sorted(triggered_revisions) !=
                                   sorted(properties)):
      logging.info(
          'revision properties have changed from %s to %s. '
          'clearing previous data.', triggered_revisions, properties)
      build_db.aux['triggered_revisions'] = dict.fromkeys(properties)
    new_failures = reject_old_revisions(new_failures, build_db)

  close_tree_if_necessary(new_failures, options.status_user, options.password,
                          options.status_url, options.set_status,
                          options.revision_properties.split(','))
  notify_failures(new_failures, options.sheriff_url,
                  options.default_from_email, options.email_app_url,
                  options.email_app_secret, options.email_domain,
                  options.filter_domain, options.disable_domain_filter)

  if not options.skip_build_db_update:
    build_scan_db.save_build_db(build_db, gatekeeper_config, options.build_db)

  return 0

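# A minimal sketch of the usual entry-point wiring for scripts like these; an
# assumption, since the excerpts above stop at main(). The argv-taking variant
# would instead be invoked as main(sys.argv).
if __name__ == '__main__':
  sys.exit(main())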