def make_progress() -> Progress:
    """Create a Progress with a deterministic fake clock for repeatable renders."""
    _time = 0.0

    def fake_time():
        # Return the current fake time, then advance it by one second.
        nonlocal _time
        try:
            return _time
        finally:
            _time += 1

    console = Console(
        file=io.StringIO(),
        force_terminal=True,
        color_system="truecolor",
        width=80,
        legacy_windows=False,
        _environ={},
    )
    progress = Progress(console=console, get_time=fake_time, auto_refresh=False)
    task1 = progress.add_task("foo")
    task2 = progress.add_task("bar", total=30)
    progress.advance(task2, 16)
    task3 = progress.add_task("baz", visible=False)
    task4 = progress.add_task("egg")
    progress.remove_task(task4)
    task4 = progress.add_task("foo2", completed=50, start=False)
    progress.stop_task(task4)
    progress.start_task(task4)
    progress.update(
        task4, total=200, advance=50, completed=200, visible=True, refresh=True
    )
    progress.stop_task(task4)
    return progress
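# Usage sketch: a minimal snapshot-style check, assuming make_progress is used
# in tests. The function name and assertion below are illustrative, not from
# the source; the render is captured from the StringIO console created above.
def check_render():
    progress = make_progress()
    progress.refresh()  # auto_refresh is off, so force one render
    output = progress.console.file.getvalue()
    assert "foo" in output and "bar" in output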
def index_object(self, engine, bucket, obj, progress=None, overall=None):
    """
    Read a file in S3, index it, and insert records into the table.
    """
    key, version, size = obj['Key'], obj['ETag'].strip('"'), obj['Size']
    key_id = self.insert_key(engine, key, version)

    # read the file from s3
    content = read_object(bucket, key)
    start_offset = 0
    records = {}

    # per-file progress bar
    rel_key = relative_key(key, self.s3_prefix)
    file_progress = progress and progress.add_task(f'[yellow]{rel_key}[/]', total=size)

    # process each line (record)
    for line_num, line in enumerate(content.iter_lines()):
        row = orjson.loads(line)
        end_offset = start_offset + len(line) + 1  # newline

        try:
            for key_tuple in self.schema.index_builder(row):
                if key_tuple in records:
                    records[key_tuple]['end_offset'] = end_offset
                else:
                    records[key_tuple] = {
                        'key': key_id,
                        'start_offset': start_offset,
                        'end_offset': end_offset,
                    }
        except (KeyError, ValueError) as e:
            logging.warning('%s; skipping...', e)

        # update progress
        if progress:
            progress.update(file_progress, completed=end_offset)

        # track current file offset
        start_offset = end_offset

    # done with this object; tick the overall progress
    if progress:
        progress.remove_task(file_progress)
        progress.advance(overall, advance=size)

    # NOTE: Because this is called as a job, be sure to return an iterator
    # and not the records themselves; otherwise that memory is kept around
    # for the entire duration of indexing.
    return key, ({
        **self.schema.column_values(k),
        **r
    } for k, r in records.items())
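# Caller sketch: one way to drive index_object with per-file bars nested under
# a single overall task. The `objects`, `indexer`, `engine`, and `bucket`
# names are hypothetical; only the Progress calls are standard rich API.
with Progress() as progress:
    overall = progress.add_task(
        '[green]Indexing[/]',
        total=sum(o['Size'] for o in objects),  # hypothetical S3 listing
    )
    for obj in objects:
        indexer.index_object(engine, bucket, obj, progress=progress, overall=overall)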
async def generate_site(
    *,
    theme: Theme,
    progress: rich.progress.Progress,
) -> None:
    task = progress.add_task(theme.name, total=5)
    env = IsolatedEnvironment(theme.name)
    destination_path = DESTINATION["sites"] / theme.name
    try:
        await env.create(delete="CI" in os.environ)
        progress.advance(task, 1)

        progress.log(f"[yellow]{theme.name}[reset]: Installing packages...")
        await env.install("--pre", "sphinx")
        progress.advance(task, 1)

        await env.install(theme.pypi_package)
        progress.advance(task, 1)

        render_conf_template(theme, env.path / "conf.py")
        progress.advance(task, 1)

        if destination_path.exists():
            shutil.rmtree(destination_path)

        progress.log(f"[yellow]{theme.name}[reset]: Building site...")
        returncode, output = await env.run(
            "sphinx-build",
            "-v",
            "-b=dirhtml",
            f"-c={env.path}",
            str(BUILD["sources"]),
            str(destination_path),
        )
        progress.advance(task, 1)

        if returncode:
            stdout, stderr = output
            message = [
                " stdout ".center(88, "="),
                stdout.decode(),
                " stderr ".center(88, "="),
                stderr.decode(),
            ]
            raise Exception("\n".join(message))
    except Exception as e:
        progress.log(f"Fail: [red]{theme.name}[reset]\n\t{e}")
        destination_path.mkdir(parents=True, exist_ok=True)
        (destination_path / "index.html").write_text(get_error_page(theme, e))
    else:
        progress.log(f"Done: [green]{theme.name}[reset]")
    finally:
        progress.remove_task(task)
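# Caller sketch: building every theme concurrently under one shared Progress.
# asyncio.gather and the Progress context manager are standard, but this
# entry point and the `themes` iterable are assumptions.
async def build_all(themes):
    with rich.progress.Progress() as progress:
        await asyncio.gather(
            *(generate_site(theme=theme, progress=progress) for theme in themes)
        )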
def singularity_pull_image(self, container, out_path, cache_path, progress):
    """Pull a singularity image using ``singularity pull``

    Attempt to use a local installation of singularity to pull the image.

    Args:
        container (str): A pipeline's container name. Usually it is of similar
            format to ``nfcore/name:version``.
        out_path (str): The final target output path
        cache_path (str, None): The NXF_SINGULARITY_CACHEDIR path if set, None if not
        progress (Progress): Rich progress bar instance to add tasks to.

    Raises:
        Various exceptions possible from `subprocess` execution of Singularity.
    """
    output_path = cache_path or out_path

    # Pull using singularity
    address = "docker://{}".format(container.replace("docker://", ""))
    singularity_command = ["singularity", "pull", "--name", output_path, address]
    log.debug("Building singularity image: {}".format(address))
    log.debug("Singularity command: {}".format(" ".join(singularity_command)))

    # Progress bar to show that something is happening
    task = progress.add_task(
        container, start=False, total=False, progress_type="singularity_pull", current_log=""
    )

    # Run the singularity pull command
    proc = subprocess.Popen(
        singularity_command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
        bufsize=1,
    )
    for line in proc.stdout:
        log.debug(line.strip())
        progress.update(task, current_log=line.strip())

    # Copy cached download if we are using the cache
    if cache_path:
        log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path)))
        progress.update(task, current_log="Copying from cache to target directory")
        shutil.copyfile(cache_path, out_path)

    progress.remove_task(task)
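# Progress setup sketch: the custom `current_log` task field above only
# renders if the Progress was built with a column that reads task.fields.
# One possible column layout (an assumption, not the source's exact one):
from rich.progress import Progress, SpinnerColumn, TextColumn

progress = Progress(
    SpinnerColumn(),
    TextColumn("[bold blue]{task.description}"),
    TextColumn("{task.fields[current_log]}"),  # shows the last singularity log line
)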
async def _my_stub(theme: Theme, progress: rich.progress.Progress):
    first = random.randint(1, 10) * 0.1
    second = random.randint(1, 10) * 0.1

    task = progress.add_task(theme.name, total=first + second)
    await asyncio.sleep(first)
    progress.advance(task, first)
    await asyncio.sleep(second)
    progress.advance(task, second)

    progress.log(f"Done: {theme}")
    progress.remove_task(task)
def singularity_download_image(self, container, out_path, cache_path, progress):
    """Download a singularity image from the web.

    Use native Python to download the file.

    Args:
        container (str): A pipeline's container name. Usually it is of similar
            format to ``https://depot.galaxyproject.org/singularity/name:version``
        out_path (str): The final target output path
        cache_path (str, None): The NXF_SINGULARITY_CACHEDIR path if set, None if not
        progress (Progress): Rich progress bar instance to add tasks to.
    """
    log.debug(f"Downloading Singularity image: '{container}'")

    # Set output path to save file to
    output_path = cache_path or out_path
    output_path_tmp = f"{output_path}.partial"
    log.debug(f"Downloading to: '{output_path_tmp}'")

    # Set up progress bar
    nice_name = container.split("/")[-1][:50]
    task = progress.add_task(nice_name, start=False, total=False, progress_type="download")
    try:
        # Delete temporary file if it already exists
        if os.path.exists(output_path_tmp):
            os.remove(output_path_tmp)

        # Open file handle and download
        with open(output_path_tmp, "wb") as fh:
            # Disable caching as this breaks streamed downloads
            with requests_cache.disabled():
                r = requests.get(container, allow_redirects=True, stream=True, timeout=60 * 5)
                filesize = r.headers.get("Content-length")
                if filesize:
                    progress.update(task, total=int(filesize))
                    progress.start_task(task)

                # Stream download
                for data in r.iter_content(chunk_size=4096):
                    # Check that the user didn't hit ctrl-c
                    if self.kill_with_fire:
                        raise KeyboardInterrupt
                    progress.update(task, advance=len(data))
                    fh.write(data)

        # Rename partial filename to final filename
        os.rename(output_path_tmp, output_path)
        output_path_tmp = None

        # Copy cached download if we are using the cache
        if cache_path:
            log.debug("Copying {} from cache: '{}'".format(container, os.path.basename(out_path)))
            progress.update(task, description="Copying from cache to target directory")
            shutil.copyfile(cache_path, out_path)

        progress.remove_task(task)

    except BaseException:  # also catches the KeyboardInterrupt raised above
        # Kill the progress bars
        for t in progress.task_ids:
            progress.remove_task(t)

        # Try to delete the incomplete download
        log.debug(f"Deleting incomplete singularity image download:\n'{output_path_tmp}'")
        if output_path_tmp and os.path.exists(output_path_tmp):
            os.remove(output_path_tmp)
        if output_path and os.path.exists(output_path):
            os.remove(output_path)

        # Re-raise the caught exception
        raise
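# Dispatch sketch: one way to make the `progress_type` field above meaningful
# is a Progress subclass that picks columns per task. The class name and
# column choices are assumptions, not the source's exact layout; the
# get_renderables/make_tasks_table hooks are standard rich API.
from rich.progress import BarColumn, DownloadColumn, Progress, TextColumn

class DownloadProgress(Progress):
    def get_renderables(self):
        for task in self.tasks:
            if task.fields.get("progress_type") == "download":
                # Byte-aware columns for streamed HTTP downloads
                self.columns = (TextColumn("{task.description}"), BarColumn(), DownloadColumn())
            else:
                self.columns = (TextColumn("{task.description}"), BarColumn())
            yield self.make_tasks_table([task])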