def main(reactor):
    """
    Download the Jenkins build metadata for the flocker multijob and then
    fetch the logs of every failed sub-build.

    :param reactor: the Twisted reactor (as supplied by ``task.react``).
    :return Deferred: fires once all failure logs have been downloaded.
    """
    if not BASE_DIR.exists():
        BASE_DIR.makedirs()
    base_path = 'job/ClusterHQ-flocker/job/master/job/__main_multijob/'
    d = jenkins_json_get(
        base_path + 'api/json?tree=builds[result,number,timestamp,'
        'subBuilds[result,buildNumber,jobName,url,timestamp]]')

    def write_main_data(data):
        # Timestamped filename so successive downloads never overwrite
        # each other; the lexically greatest name is the newest snapshot.
        filename = 'api.' + datetime.datetime.utcnow().isoformat() + '.json'
        # Context manager closes the handle deterministically (the previous
        # version leaked the file object returned by open()).  Writing the
        # encoded dump also works on binary-mode files under Python 3,
        # where json.dump(str) to a 'wb' file would raise TypeError.
        with BASE_DIR.child(filename).open('wb') as f:
            f.write(json.dumps(data).encode('utf-8'))
        return data
    d.addCallback(write_main_data)

    d.addCallback(_get_failure_urls)

    def download_failed_logs(urls):
        # Semaphore caps the number of concurrent HTTP requests.
        sem = defer.DeferredSemaphore(MAX_CONCURRENT_REQUESTS)
        # Materialise as a list: DeferredList wants all deferreds created
        # up front, and py3 map() is a lazy iterator.
        deferreds = [fetch_failure_data(sem, url) for url in urls]
        return defer.DeferredList(deferreds)

    d.addCallback(download_failed_logs)
    return d
def main(reactor):
    """
    Download the Jenkins build metadata (including build durations) for the
    flocker multijob and then fetch the logs of every failed sub-build.

    :param reactor: the Twisted reactor (as supplied by ``task.react``).
    :return Deferred: fires once all failure logs have been downloaded.
    """
    if not BASE_DIR.exists():
        BASE_DIR.makedirs()
    base_path = 'job/ClusterHQ-flocker/job/master/job/__main_multijob/'
    d = jenkins_json_get(
        base_path + 'api/json?tree=builds[result,number,timestamp,duration,'
        'subBuilds[result,buildNumber,jobName,url,timestamp,duration]]')

    def write_main_data(data):
        # Timestamped filename so successive downloads never overwrite
        # each other; the lexically greatest name is the newest snapshot.
        filename = 'api.' + datetime.datetime.utcnow().isoformat() + '.json'
        # Context manager closes the handle deterministically (the previous
        # version leaked the file object returned by open()).  Writing the
        # encoded dump also works on binary-mode files under Python 3,
        # where json.dump(str) to a 'wb' file would raise TypeError.
        with BASE_DIR.child(filename).open('wb') as f:
            f.write(json.dumps(data).encode('utf-8'))
        return data

    d.addCallback(write_main_data)

    d.addCallback(_get_failure_urls)

    def download_failed_logs(urls):
        # Semaphore caps the number of concurrent HTTP requests.
        sem = defer.DeferredSemaphore(MAX_CONCURRENT_REQUESTS)
        # Materialise as a list: DeferredList wants all deferreds created
        # up front, and py3 map() is a lazy iterator.
        deferreds = [fetch_failure_data(sem, url) for url in urls]
        return defer.DeferredList(deferreds)

    d.addCallback(download_failed_logs)
    return d
def load_build_data(since=None):
    """
    Load the build data from the most recently downloaded API snapshot.

    :param Optional[datetime] since: only builds newer than this datetime
           will be included if this is provided.
    :return Iterable[dict]: an iterable of build records.
    :raise AssertionError: if no ``api.*.json`` snapshot has been downloaded.
    """
    info_files = BASE_DIR.globChildren('api.*.json')
    # Explicit raise (same exception type as the old ``assert``) so the
    # check still runs under ``python -O``, where asserts are stripped.
    if not info_files:
        raise AssertionError("Haven't downloaded any data")
    # Filenames embed an ISO-8601 UTC timestamp, so the lexically greatest
    # path is the most recent snapshot — max() avoids sorting the whole list.
    api_data = max(info_files, key=lambda fp: fp.path)
    with api_data.open() as f:
        builds = json.load(f)['builds']
        if since:
            builds = builds_since(builds, since)
        return builds
# Example 4
def load_build_data(since=None):
    """
    Return the build records from the newest downloaded API snapshot.

    :param Optional[datetime] since: only builds newer than this datetime
           will be included if this is provided.
    :return Iterable[dict]: an iterable of build records.
    """
    snapshots = BASE_DIR.globChildren('api.*.json')
    assert snapshots, "Haven't downloaded any data"
    # The ISO timestamp embedded in each filename sorts lexically, so the
    # last path after sorting is the most recent snapshot.
    newest = sorted(snapshots, key=lambda fp: fp.path)[-1]
    with newest.open() as f:
        records = json.load(f)['builds']
    if since:
        records = builds_since(records, since)
    return records
 def write_main_data(data):
     """
     Persist *data* as a timestamped JSON snapshot under ``BASE_DIR`` and
     return it unchanged so it can keep flowing down a callback chain.

     :param data: JSON-serialisable build data.
     :return: *data*, unchanged.
     """
     filename = 'api.' + datetime.datetime.utcnow().isoformat() + '.json'
     # Context manager closes the handle deterministically (the previous
     # version leaked the file object returned by open()).  Writing the
     # encoded dump also works on binary-mode files under Python 3, where
     # json.dump(str) to a 'wb' file would raise TypeError.
     with BASE_DIR.child(filename).open('wb') as f:
         f.write(json.dumps(data).encode('utf-8'))
     return data
 def write_main_data(data):
     """
     Write *data* to a new ``api.<utc-timestamp>.json`` file in ``BASE_DIR``
     and pass it through unchanged for further callbacks.

     :param data: JSON-serialisable build data.
     :return: *data*, unchanged.
     """
     filename = 'api.' + datetime.datetime.utcnow().isoformat() + '.json'
     # Close the file deterministically instead of leaking the open()
     # handle; encoding the dump ourselves keeps the write valid for a
     # binary-mode file on both Python 2 and Python 3.
     with BASE_DIR.child(filename).open('wb') as f:
         f.write(json.dumps(data).encode('utf-8'))
     return data