def run(options):
    safe_rmtree('dist')
    urls = get_file_urls(options)
    completed = 0
    exc = None
    # Download all artifacts concurrently; each future resolves to the local
    # file name returned by download_file().
    with concurrent.futures.ThreadPoolExecutor() as e:
        fut_to_url = {e.submit(download_file, url): url for url in urls}
        for fut in concurrent.futures.as_completed(fut_to_url):
            url = fut_to_url[fut]
            try:
                local_fname = fut.result()
            except Exception:
                printerr("error while downloading %s" % (url))
                raise
            else:
                completed += 1
                print("downloaded %-45s %s" % (
                    local_fname,
                    bytes2human(os.path.getsize(local_fname))))
    # 2 wheels (32 and 64 bit) per supported python version
    expected = len(PY_VERSIONS) * 2
    if expected != completed:
        return exit("expected %s files, got %s" % (expected, completed))
    if exc:
        return exit()
    rename_27_wheels()
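
# run() submits download_file() to the thread pool, but that helper is not shown
# in this section. A minimal sketch of what it might look like, assuming it
# streams each artifact into dist/ and returns the local path (which is what
# run() expects from fut.result()). It reuses the os and requests imports the
# code above already requires; the chunk size and exact behavior are assumptions.
def download_file(url):
    local_fname = os.path.join('dist', os.path.basename(url))
    os.makedirs('dist', exist_ok=True)
    # TIMEOUT is the module-level constant sketched further below.
    with requests.get(url, stream=True, timeout=TIMEOUT) as resp:
        resp.raise_for_status()
        with open(local_fname, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=16384):
                f.write(chunk)
    return local_fname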
def get_file_urls(options):
    with requests.Session() as session:
        # Fetch project metadata, which includes the jobs of the latest build.
        data = session.get(
            BASE_URL + '/projects/' + options.user + '/' + options.project,
            timeout=TIMEOUT)
        data = data.json()
        urls = []
        for job in (job['jobId'] for job in data['build']['jobs']):
            # List the artifacts exposed by each build job.
            job_url = BASE_URL + '/buildjobs/' + job + '/artifacts'
            data = session.get(job_url, timeout=TIMEOUT)
            data = data.json()
            for item in data:
                file_url = job_url + '/' + item['fileName']
                urls.append(file_url)
        if not urls:
            exit("no artifacts found")
        else:
            for url in sorted(urls, key=lambda x: os.path.basename(x)):
                yield url
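
# Both functions above rely on module-level names defined elsewhere in the
# script: BASE_URL, TIMEOUT, PY_VERSIONS and the options object produced by the
# command line parser. A minimal sketch of how they might be wired up; the
# concrete values and the --user/--project flags below are assumptions, not
# taken from the original.
import argparse

BASE_URL = 'https://ci.appveyor.com/api'     # assumed AppVeyor REST API root
TIMEOUT = 30                                 # assumed per-request timeout, in seconds
PY_VERSIONS = ['2.7', '3.6', '3.7', '3.8']   # assumed supported Python versions

def main():
    parser = argparse.ArgumentParser(
        description='download wheel artifacts from AppVeyor')
    parser.add_argument('--user', required=True, help='AppVeyor account name')
    parser.add_argument('--project', required=True, help='AppVeyor project slug')
    run(parser.parse_args())

if __name__ == '__main__':
    main()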