Example #1
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--config',
                        required=True,
                        help='Path to a config JSON file')
    args = parser.parse_args()

    with open(args.config) as config_file:
        config = json.load(config_file)

    setup_auth(config['server'])

    submission_size = config['submissionSize']
    reported_revisions = set()  # revisions already submitted, so they are not reported twice

    while True:
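        # Builds come either from the configured custom commands or from a
        # build source URL; both paths produce a list of commit dictionaries.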
        if 'customCommands' in config:
            available_builds = []
            for command in config['customCommands']:
                print "Executing", ' '.join(command['command'])
                available_builds += available_builds_from_command(
                    config['repositoryName'], command['command'],
                    command['linesToIgnore'])
                print "Got %d builds" % len(available_builds)
        else:
            url = config['buildSourceURL']
            print "Fetching available builds from", url
            available_builds = fetch_available_builds(
                config['repositoryName'], url, config['trainVersionMap'])

        available_builds = filter(
            lambda commit: commit['revision'] not in reported_revisions,
            available_builds)
        print "%d builds available" % len(available_builds)

        while available_builds:
            commits_to_submit = available_builds[:submission_size]
            revisions_to_report = map(lambda commit: commit['revision'],
                                      commits_to_submit)
            print "Submitting builds (%d remaining):" % len(
                available_builds), json.dumps(revisions_to_report)
            available_builds = available_builds[submission_size:]

            submit_commits(commits_to_submit, config['server']['url'],
                           config['slave']['name'],
                           config['slave']['password'])
            reported_revisions |= set(revisions_to_report)

            time.sleep(config['submissionInterval'])

        print "Sleeping for %d seconds" % config['fetchInterval']
        time.sleep(config['fetchInterval'])
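
Everything in this loop is driven by the --config file. Below is a hypothetical sketch of that JSON, inferred only from the keys the code above reads; every value is a placeholder, not a real dashboard setting.

# Hypothetical sketch of the --config JSON; all values are placeholders.
config = {
    'server': {'url': 'https://perf.example.com'},         # passed to setup_auth(); 'url' is used by submit_commits()
    'slave': {'name': 'bot-name', 'password': 'secret'},    # credentials passed to submit_commits()
    'repositoryName': 'SomeRepository',
    'submissionSize': 10,          # builds submitted per batch
    'submissionInterval': 30,      # seconds between batches
    'fetchInterval': 300,          # seconds between fetches
    # Either a list of custom commands to enumerate builds...
    'customCommands': [
        {'command': ['list-builds', '--latest'],    # argv-style list; joined with spaces for logging
         'linesToIgnore': 0},                       # passed straight to available_builds_from_command()
    ],
    # ...or a URL plus train-to-version map, used when 'customCommands' is absent:
    'buildSourceURL': 'https://builds.example.com/builds.json',
    'trainVersionMap': {},
}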
Example #2
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--triggerable', required=True, help='The name of the triggerable to process. e.g. build-webkit')
    parser.add_argument('--buildbot-url', required=True, help='URL for a buildbot builder; e.g. "https://build.webkit.org/"')
    parser.add_argument('--builder-config-json', required=True, help='The path to a JSON file that specifies which test and platform will be posted to which builder. '
        'The JSON should contain an array of dictionaries with keys "platform", "test", and "builder" '
        'with the platform name (e.g. mountainlion), the test path (e.g. ["Parser", "html5-full-render"]), and the builder name (e.g. Apple MountainLion Release (Perf)) as values.')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard.')

    parser.add_argument('--lookback-count', type=int, default=10, help='The number of builds to look back when finding in-progress builds on the buildbot')
    parser.add_argument('--seconds-to-sleep', type=float, default=120, help='The seconds to sleep between iterations')
    args = parser.parse_args()

    configurations = load_config(args.builder_config_json, args.buildbot_url.strip('/'))

    with open(args.server_config_json) as server_config_json:
        server_config = json.load(server_config_json)
        setup_auth(server_config['server'])

    build_requests_url = server_config['server']['url'] + '/api/build-requests/' + args.triggerable

    request_updates = {}
    while True:
        request_updates.update(find_request_updates(configurations, args.lookback_count))
        if request_updates:
            print 'Updating the build requests %s...' % ', '.join(map(str, request_updates.keys()))
        else:
            print 'No updates...'

        payload = {
            'buildRequestUpdates': request_updates,
            'slaveName': server_config['slave']['name'],
            'slavePassword': server_config['slave']['password']}
        response = update_and_fetch_build_requests(build_requests_url, payload)
        open_requests = response.get('buildRequests', [])

        root_sets = organize_root_sets_by_id_and_repository_names(response.get('rootSets', {}), response.get('roots', []))

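        # Schedule each pending request, but only when its configuration has
        # nothing already scheduled ('scheduledRequests' is empty).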
        for request in filter(lambda request: request['status'] == 'pending', open_requests):
            config = config_for_request(configurations, request)
            if not config:
                print >> sys.stderr, "Failed to find the configuration for request %s: %s" % (str(request['id']), json.dumps(request))
                continue
            if config and len(config['scheduledRequests']) < 1:
                print "Scheduling the build request %s..." % str(request['id'])
                schedule_request(config, request, root_sets)

        request_updates = find_stale_request_updates(configurations, open_requests, request_updates.keys())
        if request_updates:
            print "Found stale build requests %s..." % ', '.join(map(str, request_updates.keys()))

        time.sleep(args.seconds_to_sleep)
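
The two JSON files this loop consumes can be reconstructed from the --help text and from the keys the code reads. A hypothetical sketch, with placeholder values only:

# --builder-config-json: an array of platform/test/builder mappings (per the --help text above).
builder_config = [
    {'platform': 'mountainlion',
     'test': ['Parser', 'html5-full-render'],
     'builder': 'Apple MountainLion Release (Perf)'},
]

# --server-config-json: only these keys are read by the loop above.
server_config = {
    'server': {'url': 'https://perf.example.com'},         # handed to setup_auth(); 'url' prefixes /api/build-requests/
    'slave': {'name': 'bot-name', 'password': 'secret'},    # sent back as slaveName/slavePassword in the payload
}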
Example #3
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--svn-config-json', required=True, help='The path to a JSON file that specifies subversion syncing options')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep', type=float, default=900, help='The seconds to sleep between iterations')
    parser.add_argument('--max-fetch-count', type=int, default=10, help='The number of commits to fetch at once')
    args = parser.parse_args()

    with open(args.server_config_json) as server_config_json:
        server_config = json.load(server_config_json)
        setup_auth(server_config['server'])

    with open(args.svn_config_json) as svn_config_json:
        svn_config = json.load(svn_config_json)

    while True:
        for repository_info in svn_config:
            fetch_commits_and_submit(repository_info, server_config, args.max_fetch_count)
        print "Sleeping for %d seconds..." % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
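
A hypothetical way to exercise this entry point without a shell is to fake the command line before calling main(); the file names below are placeholders and the loop runs until interrupted.

import sys

sys.argv[1:] = ['--svn-config-json', 'svn-config.json',
                '--server-config-json', 'server-config.json',
                '--max-fetch-count', '5']
main(sys.argv)  # argparse reads sys.argv; the argv parameter itself is unused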
Example #4
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--os-config-json', required=True, help='The path to a JSON that specifies how to fetch OS build information')
    parser.add_argument('--server-config-json', required=True, help='The path to a JSON file that specifies the perf dashboard')
    parser.add_argument('--seconds-to-sleep', type=float, default=43200, help='The seconds to sleep between iterations')
    args = parser.parse_args()

    with open(args.os_config_json) as os_config_json:
        os_config_list = json.load(os_config_json)

    with open(args.server_config_json) as server_config_json:
        server_config = json.load(server_config_json)
        setup_auth(server_config['server'])

    fetchers = [OSBuildFetcher(os_config) for os_config in os_config_list]

    while True:
        for fetcher in fetchers:
            fetcher.fetch_and_report_new_builds(server_config)
        print "Sleeping for %d seconds" % args.seconds_to_sleep
        time.sleep(args.seconds_to_sleep)
Example #5
def single_tweet(request, username, tweet_id):  # todo: linkify username back to list?
    # ignore favicon requests
    if username == 'favicon.ico':
        return None

    auth = setup_auth(request)
    tweet, http_status = fetch_tweet(username, tweet_id, auth)
    if not tweet:
        return general_error(request, username, http_status)
    return render_to_response('single_tweet.html',
                              {'tweet': tweet, 'username': username},
                              context_instance=RequestContext(request))
Example #6
def user_timeline(request, username, page_num=1):
    # ignore favicon requests
    if username == 'favicon.ico':
        return None
    # take a look at IE before shipping
    auth = setup_auth(request)
    user, tweets, http_status = fetch_page(username, page_num, auth)
    if not tweets and http_status != 200:
        return general_error(request, username, http_status)
    requested_page = page_num
    if len(tweets) < 20: # todo: this won't work if last page has 20 tweets
        page_num = -1 # we're on last page, so set page_num to -1 so next link won't show
    return render_to_response('user_timeline.html',
                              {'tweets': tweets,
                               'username': user.username,
                               'old_timer_and_or_gabber': user.old_timer_and_or_gabber,
                               'requested_page': requested_page,
                               'prev_page': int(page_num) - 1,
                               'next_page': int(page_num) + 1},  # detect when there are no more pages
                              context_instance=RequestContext(request))
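
Neither view shows how it is routed. Below is a hypothetical URLconf wiring both views up, assuming a Django 1.8/1.9-era project (old enough for context_instance, new enough for a plain list of url() patterns); the timeline.views module name and the URL patterns are illustrative only.

from django.conf.urls import url

from timeline import views  # hypothetical module containing the two views above

urlpatterns = [
    # /<username>/ and /<username>/<page_num>/ -> user_timeline (page_num defaults to 1)
    url(r'^(?P<username>[\w.@-]+)/$', views.user_timeline),
    url(r'^(?P<username>[\w.@-]+)/(?P<page_num>\d+)/$', views.user_timeline),
    # /<username>/status/<tweet_id>/ -> single_tweet
    url(r'^(?P<username>[\w.@-]+)/status/(?P<tweet_id>\d+)/$', views.single_tweet),
]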