def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--os-config-json',
        required=True,
        help=
        'The path to a JSON that specifies how to fetch OS build information')
    parser.add_argument(
        '--server-config-json',
        required=True,
        help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep',
                        type=float,
                        default=43200,
                        help='The seconds to sleep between iterations')
    args = parser.parse_args()

    with open(args.os_config_json) as os_config_json:
        os_config_list = json.load(os_config_json)

    fetchers = [OSBuildFetcher(os_config) for os_config in os_config_list]

    while True:
        server_config = load_server_config(args.server_config_json)
        for fetcher in fetchers:
            fetcher.fetch_and_report_new_builds(server_config)
        print "Sleeping for %d seconds" % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
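All of these polling loops reload the dashboard configuration on each iteration through load_server_config, which is defined elsewhere. A minimal sketch, assuming the file simply holds the 'server' and 'slave' sections that the later examples index into, might be:

import json

def load_server_config(json_path):
    # Re-read the configuration on every call, presumably so that credential
    # or URL changes are picked up without restarting the polling loop.
    with open(json_path) as server_config_json:
        server_config = json.load(server_config_json)
    # Shape inferred from how these examples use it (values are placeholders):
    # {"server": {"url": "https://perf.dashboard.example"},
    #  "slave": {"name": "some-slave", "password": "secret"}}
    return server_config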
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--triggerable', required=True, help='The name of the triggerable to process. e.g. build-webkit')
    parser.add_argument('--buildbot-url', required=True, help='URL for a buildbot builder; e.g. "https://build.webkit.org/"')
    parser.add_argument('--builder-config-json', required=True, help='The path to a JSON file that specifies which test and platform will be posted to which builder. '
        'The JSON should contain an array of dictionaries with keys "platform", "test", and "builder" '
        'with the platform name (e.g. mountainlion), the test path (e.g. ["Parser", "html5-full-render"]), and the builder name (e.g. Apple MountainLion Release (Perf)) as values.')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard.')

    parser.add_argument('--lookback-count', type=int, default=10, help='The number of builds to look back when finding in-progress builds on the buildbot')
    parser.add_argument('--seconds-to-sleep', type=float, default=120, help='The seconds to sleep between iterations')
    args = parser.parse_args()

    configurations = load_config(args.builder_config_json, args.buildbot_url.strip('/'))

    request_updates = {}
    while True:
        server_config = load_server_config(args.server_config_json)
        request_updates.update(find_request_updates(configurations, args.lookback_count))
        if request_updates:
            print 'Updating the build requests %s...' % ', '.join(map(str, request_updates.keys()))
        else:
            print 'No updates...'

        payload = {
            'buildRequestUpdates': request_updates,
            'slaveName': server_config['slave']['name'],
            'slavePassword': server_config['slave']['password']}

        build_requests_url = server_config['server']['url'] + '/api/build-requests/' + args.triggerable + '?useLegacyIdResolution=true'
        response = update_and_fetch_build_requests(build_requests_url, payload)
        open_requests = response.get('buildRequests', [])

        root_sets = organize_root_sets_by_id_and_repository_names(response.get('rootSets', {}), response.get('roots', []))

        for request in filter(lambda request: request['status'] == 'pending', open_requests):
            config = config_for_request(configurations, request)
            if not config:
                print >> sys.stderr, "Failed to find the configuration for request %s: %s" % (str(request['id']), json.dumps(request))
                continue
            if config and len(config['scheduledRequests']) < 1:
                print "Scheduling the build request %s..." % str(request['id'])
                schedule_request(config, request, root_sets)

        request_updates = find_stale_request_updates(configurations, open_requests, request_updates.keys())
        if request_updates:
            print "Found stale build requests %s..." % ', '.join(map(str, request_updates.keys()))

        time.sleep(args.seconds_to_sleep)
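The --builder-config-json layout is spelled out in the help string above; as a concrete illustration (the file name and values are hypothetical), such a file could be written like this:

import json

# Hypothetical builder configuration following the structure described above:
# an array of dictionaries with "platform", "test", and "builder" keys.
example_builder_config = [
    {
        'platform': 'mountainlion',
        'test': ['Parser', 'html5-full-render'],
        'builder': 'Apple MountainLion Release (Perf)',
    },
]

with open('builder-config.json', 'w') as config_file:
    json.dump(example_builder_config, config_file, indent=4)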
Example #3
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--svn-config-json', required=True, help='The path to a JSON file that specifies subversion syncing options')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep', type=float, default=900, help='The seconds to sleep between iterations')
    parser.add_argument('--max-fetch-count', type=int, default=10, help='The number of commits to fetch at once')
    args = parser.parse_args()

    with open(args.svn_config_json) as svn_config_json:
        svn_config = json.load(svn_config_json)

    while True:
        server_config = load_server_config(args.server_config_json)
        for repository_info in svn_config:
            fetch_commits_and_submit(repository_info, server_config, args.max_fetch_count)
        print "Sleeping for %d seconds..." % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
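The per-repository options are consumed by fetch_commits_and_submit, which is not shown here, so the entry below is purely illustrative; its field names are assumptions rather than the script's actual schema:

# Purely illustrative --svn-config-json content (field names are guesses):
example_svn_config = [
    {
        'name': 'WebKit',                                    # assumed field
        'url': 'https://svn.webkit.org/repository/webkit',   # assumed field
    },
]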
Example #4
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--repository-config-json',
        required=True,
        help='The path to a JSON file that specifies subversion syncing options'
    )
    parser.add_argument(
        '--server-config-json',
        required=True,
        help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep',
                        type=float,
                        default=900,
                        help='The seconds to sleep between iterations')
    parser.add_argument('--max-fetch-count',
                        type=int,
                        default=10,
                        help='The number of commits to fetch at once')
    parser.add_argument(
        '--max-ancestor-fetch-count',
        type=int,
        default=100,
        help=
        'The number of commits to fetch at once if some commits are missing previous commits'
    )
    args = parser.parse_args()

    with open(args.repository_config_json) as repository_config_json:
        repositories = [
            load_repository(repository_info)
            for repository_info in json.load(repository_config_json)
        ]

    while True:
        server_config = load_server_config(args.server_config_json)
        for repository in repositories:
            try:
                repository.fetch_commits_and_submit(
                    server_config, args.max_fetch_count,
                    args.max_ancestor_fetch_count)
            except Exception as error:
                print "Failed to fetch and sync:", error

        print "Sleeping for %d seconds..." % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
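load_repository and the objects it returns are defined elsewhere; a minimal sketch of the interface this loop relies on, with method names taken from the calls above and the bodies left as assumptions, could be:

class Repository(object):
    # Sketch of the interface assumed by the polling loop above; the real
    # implementation fetches new commits and submits them to the dashboard.
    def __init__(self, repository_info):
        self._info = repository_info

    def fetch_commits_and_submit(self, server_config, max_fetch_count,
                                 max_ancestor_fetch_count=None):
        raise NotImplementedError


def load_repository(repository_info):
    # Assumed factory: these examples only require that it accept one entry of
    # the --repository-config-json array and return a repository object.
    return Repository(repository_info)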
Example #5
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--os-config-json', required=True, help='The path to a JSON that specifies how to fetch OS build information')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep', type=float, default=43200, help='The seconds to sleep between iterations')
    args = parser.parse_args()

    with open(args.os_config_json) as os_config_json:
        os_config_list = json.load(os_config_json)

    fetchers = [OSBuildFetcher(os_config) for os_config in os_config_list]

    while True:
        server_config = load_server_config(args.server_config_json)
        for fetcher in fetchers:
            fetcher.fetch_and_report_new_builds(server_config)
        print "Sleeping for %d seconds" % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
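OSBuildFetcher is constructed from one entry of the --os-config-json array and must expose fetch_and_report_new_builds; a skeleton reflecting just those assumptions (the actual fetching and reporting logic is not shown in these examples) would be:

class OSBuildFetcher(object):
    # Skeleton only: holds one --os-config-json entry and is expected to
    # report newly discovered OS builds to the dashboard on each call.
    def __init__(self, os_config):
        self._os_config = os_config

    def fetch_and_report_new_builds(self, server_config):
        raise NotImplementedError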
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--repository-config-json', required=True, help='The path to a JSON file that specifies subversion syncing options')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep', type=float, default=900, help='The seconds to sleep between iterations')
    parser.add_argument('--max-fetch-count', type=int, default=10, help='The number of commits to fetch at once')
    args = parser.parse_args()

    with open(args.repository_config_json) as repository_config_json:
        repositories = [load_repository(repository_info) for repository_info in json.load(repository_config_json)]

    while True:
        server_config = load_server_config(args.server_config_json)
        for repository in repositories:
            try:
                repository.fetch_commits_and_submit(server_config, args.max_fetch_count)
            except Exception as error:
                print "Failed to fetch and sync:", error

        print "Sleeping for %d seconds..." % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
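Each of these scripts is meant to be run directly; a conventional entry point for the main(argv) variants, with a purely illustrative invocation, looks like:

if __name__ == '__main__':
    # Example invocation (script and file names are placeholders):
    #   python sync-commits.py --repository-config-json repositories.json \
    #       --server-config-json server-config.json
    main(sys.argv)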
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--server-config-json',
        required=True,
        help='The path to a JSON file that specifies the perf dashboard.')
    args = parser.parse_args()

    maintenance_dir = determine_maintenance_dir()

    server_config = load_server_config(args.server_config_json)

    print 'Submitting results in "%s" to "%s"' % (
        maintenance_dir, server_config['server']['url'])

    for filename in os.listdir(maintenance_dir):
        path = os.path.join(maintenance_dir, filename)
        if os.path.isfile(path) and filename.endswith('.json'):

            with open(path) as submitted_json_file:
                submitted_content = submitted_json_file.read()

            print '%s...' % filename,
            sys.stdout.flush()

            suffix = '.done'
            while True:
                if submit_report(server_config, submitted_content):
                    break
                if ask_yes_no_question(
                        'Suffix the file with .error and continue?'):
                    suffix = '.error'
                    break
                else:
                    sys.exit(0)

            os.rename(path, path + suffix)

            print 'Done'
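submit_report and ask_yes_no_question come from the surrounding module; the prompt helper only needs to return a truthy or falsy answer, so a minimal Python 2 sketch (an assumption, not the module's actual implementation) is:

def ask_yes_no_question(question):
    # Keep prompting until the operator answers yes or no; True means yes.
    while True:
        answer = raw_input(question + ' (y/n): ').strip().lower()
        if answer in ('y', 'yes'):
            return True
        if answer in ('n', 'no'):
            return False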
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--triggerable", required=True, help="The name of the triggerable to process. e.g. build-webkit"
    )
    parser.add_argument(
        "--buildbot-url", required=True, help='URL for a buildbot builder; e.g. "https://build.webkit.org/"'
    )
    parser.add_argument(
        "--builder-config-json",
        required=True,
        help="The path to a JSON file that specifies which test and platform will be posted to which builder. "
        'The JSON should contain an array of dictionaries with keys "platform", "test", and "builder" '
        'with the platform name (e.g. mountainlion), the test path (e.g. ["Parser", "html5-full-render"]), and the builder name (e.g. Apple MountainLion Release (Perf)) as values.',
    )
    parser.add_argument(
        "--server-config-json", required=True, help="The path to a JSON file that specifies the perf dashboard."
    )

    parser.add_argument(
        "--lookback-count",
        type=int,
        default=10,
        help="The number of builds to look back when finding in-progress builds on the buildbot",
    )
    parser.add_argument("--seconds-to-sleep", type=float, default=120, help="The seconds to sleep between iterations")
    args = parser.parse_args()

    configurations = load_config(args.builder_config_json, args.buildbot_url.strip("/"))

    request_updates = {}
    while True:
        server_config = load_server_config(args.server_config_json)
        request_updates.update(find_request_updates(configurations, args.lookback_count))
        if request_updates:
            print "Updating the build requests %s..." % ", ".join(map(str, request_updates.keys()))
        else:
            print "No updates..."

        payload = {
            "buildRequestUpdates": request_updates,
            "slaveName": server_config["slave"]["name"],
            "slavePassword": server_config["slave"]["password"],
        }

        build_requests_url = server_config["server"]["url"] + "/api/build-requests/" + args.triggerable
        response = update_and_fetch_build_requests(build_requests_url, payload)
        open_requests = response.get("buildRequests", [])

        root_sets = organize_root_sets_by_id_and_repository_names(
            response.get("rootSets", {}), response.get("roots", [])
        )

        for request in filter(lambda request: request["status"] == "pending", open_requests):
            config = config_for_request(configurations, request)
            if not config:
                print >> sys.stderr, "Failed to find the configuration for request %s: %s" % (
                    str(request["id"]),
                    json.dumps(request),
                )
                continue
            if config and len(config["scheduledRequests"]) < 1:
                print "Scheduling the build request %s..." % str(request["id"])
                schedule_request(config, request, root_sets)

        request_updates = find_stale_request_updates(configurations, open_requests, request_updates.keys())
        if request_updates:
            print "Found stale build requests %s..." % ", ".join(map(str, request_updates.keys()))

        time.sleep(args.seconds_to_sleep)
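update_and_fetch_build_requests POSTs the payload to the build-requests endpoint and returns the decoded response consumed above; a minimal Python 2 sketch of that round trip (authentication and error handling are assumptions) might be:

import json
import urllib2

def update_and_fetch_build_requests(build_requests_url, payload):
    # POST the JSON payload and decode the JSON reply; the loop above only
    # relies on the 'buildRequests', 'rootSets', and 'roots' keys of the result.
    request = urllib2.Request(build_requests_url,
                              data=json.dumps(payload),
                              headers={'Content-Type': 'application/json'})
    return json.load(urllib2.urlopen(request))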