def test_latest(self):
    """The first build yielded is the repository's most recent build."""
    travis = _get_travispy()
    repository = get_travis_repo(travis, 'travispy/on_pypy')
    build_stream = get_historical_builds(travis, repository)
    newest = next(build_stream)
    assert newest.repository_id == 2598880
    assert newest.id == repository.last_build_id
def test_multiple_batches_bootstrap(self):
    """Test using a repository that has lots of builds, esp. PRs."""
    # NOTE(review): a method with this exact name is defined again later in
    # this file; under a normal class body the later definition shadows this
    # one, so this copy never runs — confirm which copy should be kept.
    _travis = _get_travispy()
    repo = get_travis_repo(_travis, 'twbs/bootstrap')
    builds = get_historical_builds(_travis, repo,
                                   _after=12071,
                                   _load_jobs=False)
    ids = []
    prev_number = None
    for build in builds:
        # Every build must belong to the twbs/bootstrap repository.
        assert build.repository_id == 12962
        if int(build.number) in [
                12069, 12062, 12061, 12054, 12049,
                12048, 12041, 12038, 12037, 12033
                ]:
            # Many duplicates
            # See https://github.com/travis-ci/travis-ci/issues/2582
            print('duplicate build number {0}: {1}'.format(
                build.number, build.id))
            if build.id in [
                    53437234, 53350534, 53350026, 53263731,
                    53263730,  # two extra 12054
                    53180440, 53179846, 53062896, 53019568,
                    53004896, 52960766
                    ]:
                # Known duplicate entry: same number as the previous build.
                assert prev_number == int(build.number)
            else:
                # First occurrence of the number: decreases by exactly one.
                assert prev_number == int(build.number) + 1
        elif prev_number:
            # All other build numbers decrease rather orderly
            assert prev_number == int(build.number) + 1
        prev_number = int(build.number)
        if ids:
            # Build ids must be strictly decreasing (newest first).
            assert build.id < ids[-1]
        ids.append(build.id)
        # There are many more duplicates, so we stop here.
        if int(build.number) == 12033:
            break
    # No build id may be yielded twice.
    assert len(ids) == len(set(ids))
def test_all_small(self):
    """A small repository yields all of its builds, newest first."""
    travis = _get_travispy()
    repository = get_travis_repo(travis, 'travispy/on_pypy')
    collected = []
    for item in get_historical_builds(travis, repository):
        assert item.repository_id == 2598880
        collected.append(item.id)
    assert collected == [53686685, 37521698, 28881355]
def test_after(self):
    """Only builds older than the given number are yielded, in order."""
    travis = _get_travispy()
    repository = get_travis_repo(travis, 'travispy/on_pypy')
    stream = get_historical_builds(travis, repository,
                                   _after=3, _load_jobs=False)
    # Starting after build 3, the stream yields builds 2 then 1.
    for expected_number in ('2', '1'):
        item = next(stream)
        assert item.repository_id == 2598880
        assert item.number == expected_number
def test_multiple_batches_bootstrap(self):
    """Test using a repository that has lots of builds, esp. PRs."""
    travis = _get_travispy()
    repository = get_travis_repo(travis, 'twbs/bootstrap')
    # Build numbers known to occur more than once on this repository.
    # See https://github.com/travis-ci/travis-ci/issues/2582
    duplicated_numbers = {12069, 12062, 12061, 12054, 12049,
                          12048, 12041, 12038, 12037, 12033}
    # Build ids of the duplicate entries (includes two extra 12054).
    duplicate_entry_ids = {53437234, 53350534, 53350026, 53263731, 53263730,
                           53180440, 53179846, 53062896, 53019568, 53004896,
                           52960766}
    seen_ids = []
    last_number = None
    for entry in get_historical_builds(travis, repository,
                                       _after=12071, _load_jobs=False):
        assert entry.repository_id == 12962
        number = int(entry.number)
        if number in duplicated_numbers:
            print('duplicate build number {0}: {1}'.format(
                entry.number, entry.id))
            if entry.id in duplicate_entry_ids:
                # Duplicate entry repeats the previous build number.
                assert last_number == number
            else:
                assert last_number == number + 1
        elif last_number:
            # Every other build number decreases in strict order.
            assert last_number == number + 1
        last_number = number
        if seen_ids:
            # Build ids are strictly decreasing (newest first).
            assert entry.id < seen_ids[-1]
        seen_ids.append(entry.id)
        if number == 12033:
            # Many more duplicates follow, so stop here.
            break
    assert len(seen_ids) == len(set(seen_ids))
def test_multiple_batches_menegazzo(self): """Test using a repository that has greater than 2*25 builds.""" # Ideally each has one or two jobs, so that doesnt slow down the test, # and the logs are small in case the log is fetched with the job. _travis = _get_travispy() repo = get_travis_repo(_travis, 'menegazzo/travispy') builds = get_historical_builds(_travis, repo, _load_jobs=False) ids = [] prev_number = None for build in builds: assert build.repository_id == 2419489 if int(build.number) in [80]: # There are two '80' # See https://github.com/travis-ci/travis-ci/issues/2582 print('duplicate build number {0}: {1}'.format( build.number, build.id)) assert build.id in [45019395, 45019396] if build.id == 45019395: assert prev_number == int(build.number) else: assert prev_number == int(build.number) + 1 elif prev_number: # All other build numbers decrease rather orderly assert prev_number == int(build.number) + 1 prev_number = int(build.number) if ids: assert build.id < ids[-1] ids.append(build.id) if len(ids) > 100: break assert len(ids) == len(set(ids))
def main():
    """Main handler."""
    # NOTE(review): another function named `main` is defined later in this
    # file; at import time the later definition shadows this one — confirm
    # which copy should be kept.
    options = config.get_options()
    # Enable debug logging when requested, or when option parsing failed.
    if not options or options.verbose:
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(levelname)9s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')
    __logs__.debug('{0!r}'.format(options))
    t = config._get_travispy()
    gh = config._get_github()
    if t:
        user = t.user()
    else:
        user = None
    targets = []
    # Positional arguments may be full URLs or extended slugs.
    for target_string in options.targets:
        if '://' in target_string:
            identifier = Target.from_url(target_string)
            targets.append(identifier)
        else:
            identifier = Target.from_extended_slug(target_string)
            targets.append(identifier)
    if options.refresh:
        # Re-process repositories already stored on disk.
        slugs = get_stored_repo_slugs(options.dir, options.format)
        for slug in slugs:
            identifier = Target.from_simple_slug(slug)
            targets.append(identifier)
    if options.self:
        # --self requires an authenticated Travis user.
        assert user
        targets += get_user_repos(t, user)
    if options.forks:
        # NOTE(review): `targets` grows while this loop iterates it, so the
        # freshly appended forks are themselves scanned for forks (forks of
        # forks) — confirm whether this transitive expansion is intended.
        for target in targets:
            slugs = get_forks(gh, target.slug)
            forks = get_travis_repos(t, slugs)
            targets += forks
    if options.all or options.old:
        # --all takes every historical build; --old takes at most `count`.
        count = None if options.all else options.count
        new_targets = []
        for target in targets:
            new_targets += list(islice(
                get_historical_builds(t, target.slug, _load_jobs=False),
                count))
        targets = new_targets
    # TODO: dont enumerate jobs if the files are all dated after the build end
    # TODO: enumerate all files starting with the job number,
    # and delete -started, -etc, when state is 'passed.
    if not options.force:
        # Skip targets whose logs are already stored, unless forced.
        targets = skip_stored(targets, options.dir, options.format)
    jobs = get_jobs(t, targets)
    if options.wait:
        jobs = get_completed(t, jobs, options.sleep)
    for job in jobs:
        download_job_log(options.dir, job, options.format)
def main():
    """Main handler: collect build targets and download their job logs.

    Builds a list of targets from positional arguments, stored repositories
    (--refresh), the authenticated user's repositories (--self), and direct
    forks (--forks); optionally expands each target into its historical
    builds (--all / --old); then downloads the log of every resulting job.
    """
    options = config.get_options()
    # Enable debug logging when requested, or when option parsing failed.
    if not options or options.verbose:
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(levelname)9s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')
    __logs__.debug('{0!r}'.format(options))
    t = config._get_travispy()
    gh = config._get_github()
    user = t.user() if t else None
    targets = []
    # Positional arguments may be full URLs or extended slugs.
    for target_string in options.targets:
        if '://' in target_string:
            targets.append(Target.from_url(target_string))
        else:
            targets.append(Target.from_extended_slug(target_string))
    if options.refresh:
        # Re-process repositories already stored on disk.
        for slug in get_stored_repo_slugs(options.dir, options.format):
            targets.append(Target.from_simple_slug(slug))
    if options.self:
        # --self requires an authenticated Travis user.
        assert user
        targets += get_user_repos(t, user)
    if options.forks:
        # Iterate over a snapshot: the original appended to `targets` while
        # iterating it, so freshly added forks were themselves scanned for
        # forks (forks of forks). Only direct forks of the requested
        # targets are expanded here.
        for target in list(targets):
            slugs = get_forks(gh, target.slug)
            targets += get_travis_repos(t, slugs)
    if options.all or options.old:
        # --all takes every historical build; --old takes at most `count`.
        count = None if options.all else options.count
        new_targets = []
        for target in targets:
            new_targets += list(islice(
                get_historical_builds(t, target.slug, _load_jobs=False),
                count))
        targets = new_targets
    # TODO: dont enumerate jobs if the files are all dated after the build end
    # TODO: enumerate all files starting with the job number,
    # and delete -started, -etc, when state is 'passed.
    if not options.force:
        # Skip targets whose logs are already stored, unless forced.
        targets = skip_stored(targets, options.dir, options.format)
    jobs = get_jobs(t, targets)
    if options.wait:
        jobs = get_completed(t, jobs, options.sleep)
    for job in jobs:
        download_job_log(options.dir, job, options.format)