def show(self):
    """Render this object's message on the console in the error color."""
    styled_message = Text(self.message, style=Style(color=values.C_ERROR))
    rich_print(styled_message)
def meaning_check(self, text: str) -> List[str]:
    """Spell-check *text* and return ``[word, *definitions]``.

    If the word looks misspelled, the user is asked (Y/N) whether to apply
    the suggested correction; answering anything else re-prompts. The
    returned list always starts with the (possibly corrected) word,
    followed by its definitions when any exist.

    Fix: the input prompt closed its rich tag with a backslash instead of
    a forward slash, so the tag was printed literally rather than ending
    the red style.
    """
    text = Word(text)
    if text.correct() != text:
        while True:
            rich_print(
                """
                [bold yellow]Are you certain this is the right word? If not, I have a correction for you. Would you like me to correct? Press Y for Yes and N for No.[/bold yellow]
                """
            )
            choice = self.console.input("[red]>>>[/red] ")
            if choice.lower() not in ("y", "n"):
                rich_print(
                    "[yellow]Valid options are:[/yellow] [bold red]Y[/bold red] and [bold red]N[/bold red] only."
                )
                continue
            if choice.lower() == "y":
                text = text.correct()
            break
    res = [text]
    if text.definitions:
        res.extend(text.definitions)
    return res
def print_template_tree(template_path):
    """Print the directory tree rooted at *template_path* using rich."""
    directory = _os.path.abspath(template_path)
    # Root label doubles as a clickable file:// link in capable terminals.
    root_label = f":open_file_folder: [link file://{directory}]{directory}"
    tree = Tree(root_label, guide_style="bold bright_blue")
    walk_directory(pathlib.Path(directory), tree)
    rich_print(tree)
def list_venvs(self, pattern, venv_pattern, pythons=None, out=sys.stdout, pipe_mode=False):
    """List matching venv instances, as plain lines (pipe mode) or a rich table.

    Instances are filtered by name pattern, optional interpreter list, and
    venv pattern; only survivors are rendered.
    """
    table = None
    if not pipe_mode:
        table = Table(
            "No.",
            "Hash",
            "Name",
            "Interpreter",
            "Environment",
            "Packages",
            box=None,
        )
    for n, inst in enumerate(self.venv.instances()):
        # Apply each filter in turn; the first failure skips the instance.
        if not inst.name or not inst.matches_pattern(pattern):
            continue
        if pythons and inst.py not in pythons:
            continue
        if not inst.match_venv_pattern(venv_pattern):
            continue
        pkgs_str = inst.full_pkg_str
        env_str = env_to_str(inst.env)
        if table is None:
            # pipe mode: machine-friendly single line per instance
            print(
                f"[#{n}] {inst.short_hash} {inst.name:12} {env_str} {inst.py} Packages({pkgs_str})"
            )
            continue
        table.add_row(
            f"[cyan]#{n}[/cyan]",
            f"[bold cyan]{hex(hash(inst))[2:9]}[/bold cyan]",
            f"[bold]{inst.name}[/bold]",
            Pretty(inst.py),
            env_str or "--",
            f"[italic]{pkgs_str}[/italic]",
        )
    if table:
        rich_print(table)
def print_verbose(
    files_copied: list[str] | str,
    files_deleted: list[str] | str,
    errors_thrown: list[str] | str,
    warnings_given: list[str] | str,
):
    """print information on file copies and errors

    Each non-empty list is shown in its own colored panel; panels are
    separated by one blank line.

    Fix: deletions were previously reported in an ``elif`` chained to the
    copies branch, so "Files Deleted" was silently skipped whenever files
    had also been copied. Each section is now reported independently, and
    the deleted panel gets the same blank-line separator as the others.
    """
    # True once any panel has been printed; used to space subsequent panels.
    print_line: bool = False
    if files_copied:
        files_copied = "\n".join(files_copied)
        rich_print(
            Panel.fit(
                f"[green]{files_copied}",
                title="Files Copied",
                box=box.SQUARE,
            )
        )
        print_line = True
    if files_deleted:
        if print_line:
            print()
        files_deleted = "\n".join(files_deleted)
        rich_print(
            Panel.fit(
                f"[dark_orange]{files_deleted}",
                title="Files Deleted",
                box=box.SQUARE,
            )
        )
        print_line = True
    if warnings_given:
        if print_line:
            print()
        warnings_given = "\n".join(warnings_given)
        rich_print(
            Panel.fit(
                f"[orange1]{warnings_given}",
                title="Warnings",
                box=box.SQUARE,
            )
        )
        print_line = True
    if errors_thrown:
        if print_line:
            print()
        errors_thrown = "\n".join(errors_thrown)
        rich_print(
            Panel.fit(
                f"[red]{errors_thrown}",
                title="Errors",
                box=box.SQUARE,
            )
        )
def dag_list_dags(args):
    """Displays dags with or without stats at the command line"""
    dagbag = DagBag(process_subdir(args.subdir))
    if dagbag.import_errors:
        # Some DAG files failed to import; warn but still list what loaded.
        from rich import print as rich_print

        rich_print(
            "[red][bold]Error:[/bold] Failed to load all files. "
            "For details, run `airflow dags list-import-errors`",
            file=sys.stderr,
        )

    def _dag_to_row(dag):
        # Flatten a DAG object into the columns shown by the console renderer.
        return {
            "dag_id": dag.dag_id,
            "filepath": dag.filepath,
            "owner": dag.owner,
            "paused": dag.get_is_paused(),
        }

    AirflowConsole().print_as(
        data=sorted(dagbag.dags.values(), key=lambda dag: dag.dag_id),
        output=args.output,
        mapper=_dag_to_row,
    )
def run_server(args: argparse.Namespace) -> None:
    """Print the startup banner and run the SSH-MITM proxy server."""
    if args.request_agent_breakin:
        args.authenticator.REQUEST_AGENT_BREAKIN = True

    # Startup banner.
    sshconsole.rule("[bold blue]SSH-MITM - ssh audits made simple", style="blue")
    rich_print(f'[bold]Version:[/bold] {ssh_mitm_version}')
    rich_print("[bold]Documentation:[/bold] https://docs.ssh-mitm.at")
    rich_print("[bold]Issues:[/bold] https://github.com/ssh-mitm/ssh-mitm/issues")
    sshconsole.rule(style="blue")

    # Gather every server option from the parsed CLI args, then build the proxy.
    server_options = dict(
        key_file=args.host_key,
        key_algorithm=args.host_key_algorithm,
        key_length=args.host_key_length,
        ssh_interface=args.ssh_interface,
        scp_interface=args.scp_interface,
        sftp_interface=args.sftp_interface,
        sftp_handler=args.sftp_handler,
        server_tunnel_interface=args.server_tunnel_interface,
        client_tunnel_interface=args.client_tunnel_interface,
        authentication_interface=args.auth_interface,
        authenticator=args.authenticator,
        transparent=args.transparent,
        args=args,
    )
    proxy = SSHProxyServer(args.listen_port, **server_options)
    if args.banner_name is not None:
        Transport._CLIENT_ID = args.banner_name  # type: ignore
    proxy.start()
def _built_inventory_tree(inventory_data, nodes_data):
    """
    Helper function to form inventory tree representation

    :param inventory_data: 'nr.nornir inventory' call dictionary results
    :param nodes_data: 'grains.item node-name' call dictionary results
    """
    inventory_tree = Tree("[bold cyan]Salt-Master")
    # One branch per physical node, created lazily on the first matching minion.
    node_branches = {}
    for minion_id, minion_result in sorted(inventory_data.items()):
        hosts_data = minion_result.get("ret", {}).get("hosts", {})
        # skip minions with no matched hosts
        if not hosts_data:
            continue
        minion_host = nodes_data[minion_id]["ret"]["nodename"]
        if minion_host not in node_branches:
            node_branches[minion_host] = inventory_tree.add(
                "[bold blue]{} node".format(minion_host)
            )
        # attach this proxy-minion under its node, then its hosts beneath it
        minion_tree = node_branches[minion_host].add(
            f"[bold green]{minion_id} proxy-minion"
        )
        for host_name, host_data in hosts_data.items():
            minion_tree.add(
                "[bold purple]{name}[/] {ip}; platform: {platform}; groups: {groups}".format(
                    name=host_data["name"],
                    ip=host_data["hostname"],
                    platform=host_data["platform"],
                    groups=", ".join(host_data["groups"] or ["None"]),
                )
            )
    rich_print(inventory_tree)
    return ""
def print(self, solution: MazeSolution = None):
    """Render the maze to the terminal, optionally overlaying *solution* directions."""
    if not len(self.map) > 0:
        return

    def _render_cell(x: int, y: int, space) -> str:
        # Translate the raw map character and colorize it by cell type;
        # solved-path cells (when a solution covers x,y) show a direction glyph.
        char = Maze.map_character_type(space)
        if char == MazeCharacters.WALL:
            return f'[dark_red]{char.value}[/dark_red]'
        if char == MazeCharacters.START:
            return f'[sea_green2]{char.value}[/sea_green2]'
        if char == MazeCharacters.END:
            return f'[orange3]{char.value}[/orange3]'
        if solution and y in solution and x in solution[y]:
            return f'[black on steel_blue]{MazeCharacters[DirectionCharacters(solution[y][x]).name].value}[/black on steel_blue]'
        return char.value

    top_border = f'{BorderCharacters.TOP.LEFT.value}{BorderCharacters.TOP.MIDDLE.value * len(self.map)}{BorderCharacters.TOP.RIGHT.value}'
    rich_print(f'[grey62]{top_border}[/grey62]')
    for y, row in enumerate(self.map):
        cells = [f'[grey62]{BorderCharacters.SIDE.LEFT.value}[/grey62]']
        cells.extend(_render_cell(x, y, space) for x, space in enumerate(row))
        cells.append(f'[grey62]{BorderCharacters.SIDE.RIGHT.value}[/grey62]')
        rich_print(''.join(cells))
    bottom_border = f'{BorderCharacters.BOTTOM.LEFT.value}{BorderCharacters.BOTTOM.MIDDLE.value * len(self.map)}{BorderCharacters.BOTTOM.RIGHT.value}'
    rich_print(f'[grey62]{bottom_border}[/grey62]')
def webserver(args):
    """Starts Airflow Webserver"""
    print(settings.HEADER)

    # Check for old/insecure config, and fail safe (i.e. don't launch) if the config is wildly insecure.
    if conf.get('webserver', 'secret_key') == 'temporary_key':
        from rich import print as rich_print

        rich_print(
            "[red][bold]ERROR:[/bold] The `secret_key` setting under the webserver config has an insecure "
            "value - Airflow has failed safe and refuses to start. Please change this value to a new, "
            "per-environment, randomly generated string, for example using this command `[cyan]openssl rand "
            "-hex 30[/cyan]`",
            file=sys.stderr,
        )
        sys.exit(1)

    # CLI flags take precedence over the corresponding [webserver] config entries.
    access_logfile = args.access_logfile or conf.get('webserver', 'access_logfile')
    error_logfile = args.error_logfile or conf.get('webserver', 'error_logfile')
    access_logformat = args.access_logformat or conf.get(
        'webserver', 'access_logformat')
    num_workers = args.workers or conf.get('webserver', 'workers')
    worker_timeout = args.worker_timeout or conf.get(
        'webserver', 'web_server_worker_timeout')
    ssl_cert = args.ssl_cert or conf.get('webserver', 'web_server_ssl_cert')
    ssl_key = args.ssl_key or conf.get('webserver', 'web_server_ssl_key')
    # SSL cert and key must be provided together (or not at all).
    if not ssl_cert and ssl_key:
        raise AirflowException(
            'An SSL certificate must also be provided for use with ' + ssl_key)
    if ssl_cert and not ssl_key:
        raise AirflowException(
            'An SSL key must also be provided for use with ' + ssl_cert)

    if args.debug:
        # Debug mode: run the Flask development server in-process (no gunicorn).
        print(
            f"Starting the web server on port {args.port} and host {args.hostname}."
        )
        app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
        app.run(
            debug=True,
            use_reloader=not app.config['TESTING'],
            port=args.port,
            host=args.hostname,
            ssl_context=(ssl_cert, ssl_key) if ssl_cert and ssl_key else None,
        )
    else:
        pid_file, stdout, stderr, log_file = setup_locations(
            "webserver", args.pid, args.stdout, args.stderr, args.log_file)

        # Check if webserver is already running; if not, remove stale pidfile
        check_if_pidfile_process_is_running(pid_file=pid_file, process_name="webserver")

        print(
            textwrap.dedent(f'''\
                Running the Gunicorn Server with:
                Workers: {num_workers} {args.workerclass}
                Host: {args.hostname}:{args.port}
                Timeout: {worker_timeout}
                Logfiles: {access_logfile} {error_logfile}
                Access Logformat: {access_logformat}
                ================================================================='''
            ))

        # Command line for the gunicorn master process.
        run_args = [
            sys.executable,
            '-m',
            'gunicorn',
            '--workers',
            str(num_workers),
            '--worker-class',
            str(args.workerclass),
            '--timeout',
            str(worker_timeout),
            '--bind',
            args.hostname + ':' + str(args.port),
            '--name',
            'airflow-webserver',
            '--pid',
            pid_file,
            '--config',
            'python:airflow.www.gunicorn_config',
        ]

        if args.access_logfile:
            run_args += ['--access-logfile', str(args.access_logfile)]

        if args.error_logfile:
            run_args += ['--error-logfile', str(args.error_logfile)]

        if args.access_logformat and args.access_logformat.strip():
            run_args += ['--access-logformat', str(args.access_logformat)]

        if args.daemon:
            run_args += ['--daemon']

        if ssl_cert:
            run_args += ['--certfile', ssl_cert, '--keyfile', ssl_key]

        run_args += ["airflow.www.app:cached_app()"]

        # Set once the gunicorn master is known; read by kill_proc via closure.
        gunicorn_master_proc = None

        def kill_proc(signum, _):
            # Terminate gunicorn gracefully; escalate to kill after a 30s wait.
            log.info("Received signal: %s. Closing gunicorn.", signum)
            gunicorn_master_proc.terminate()
            with suppress(TimeoutError):
                gunicorn_master_proc.wait(timeout=30)
            if gunicorn_master_proc.poll() is not None:
                gunicorn_master_proc.kill()
            sys.exit(0)

        def monitor_gunicorn(gunicorn_master_pid: int):
            # Register signal handlers
            signal.signal(signal.SIGINT, kill_proc)
            signal.signal(signal.SIGTERM, kill_proc)

            # These run forever until SIG{INT, TERM, KILL, ...} signal is sent
            GunicornMonitor(
                gunicorn_master_pid=gunicorn_master_pid,
                num_workers_expected=num_workers,
                master_timeout=conf.getint('webserver', 'web_server_master_timeout'),
                worker_refresh_interval=conf.getint('webserver', 'worker_refresh_interval', fallback=30),
                worker_refresh_batch_size=conf.getint(
                    'webserver', 'worker_refresh_batch_size', fallback=1),
                reload_on_plugin_change=conf.getboolean(
                    'webserver', 'reload_on_plugin_change', fallback=False),
            ).start()

        if args.daemon:
            # This makes possible errors get reported before daemonization
            os.environ['SKIP_DAGS_PARSING'] = 'True'
            app = create_app(None)
            os.environ.pop('SKIP_DAGS_PARSING')

            handle = setup_logging(log_file)

            base, ext = os.path.splitext(pid_file)
            with open(stdout, 'w+') as stdout, open(stderr, 'w+') as stderr:
                ctx = daemon.DaemonContext(
                    pidfile=TimeoutPIDLockFile(f"{base}-monitor{ext}", -1),
                    files_preserve=[handle],
                    stdout=stdout,
                    stderr=stderr,
                )
                with ctx:
                    subprocess.Popen(run_args, close_fds=True)

                    # Reading pid of gunicorn master as it will be different than
                    # the one of process spawned above.
                    while True:
                        sleep(0.1)
                        gunicorn_master_proc_pid = read_pid_from_pidfile(
                            pid_file)
                        if gunicorn_master_proc_pid:
                            break

                    # Run Gunicorn monitor
                    gunicorn_master_proc = psutil.Process(
                        gunicorn_master_proc_pid)
                    monitor_gunicorn(gunicorn_master_proc.pid)
        else:
            # Foreground mode: spawn gunicorn and monitor it until signalled.
            with subprocess.Popen(run_args, close_fds=True) as gunicorn_master_proc:
                monitor_gunicorn(gunicorn_master_proc.pid)
def quit(self) -> None:
    """Say goodbye, log the shutdown time, and terminate the script."""
    rich_print("[bold blue]Bye Bye![/bold blue]")
    shutdown_time = time.asctime(time.localtime(time.time()))
    logging.info(f"Terminating script at {shutdown_time}")
    exit()
def amaq_queries(self) -> None:
    """Main REPL loop: read a word, answer FAQs, or print its definitions.

    Reads input until a stop word triggers ``self.quit()``. FAQ matches are
    answered directly; otherwise the word is spell-checked via
    ``self.meaning_check`` and its definitions printed.

    Fixes: the greeting closed its rich tag with a backslash instead of a
    forward slash (so the tag printed literally), and the no-match message
    had a grammar error ("didn't found").
    """
    logging.info("Running Amaq " + time.asctime(time.localtime(time.time())))
    rich_print("[bold blue]Hello, I'm Amaq[/bold blue]")
    rich_print("[bold blue]Let's get to the business.[/bold blue]")
    while True:
        text = self.console.input("[red]>>>[/red] ")
        if text.strip() == "":
            continue
        logging.info("User input: " + text)
        user_in = Word(clean_text(text))
        if user_in in STOP_WORDS:
            self.quit()
        if FAQS.get(user_in):
            logging.info("User asked about question")
            rich_print(FAQS.get(user_in))
            continue
        meanings = self.meaning_check(user_in)
        # First element is the (possibly spell-corrected) word itself;
        # the remainder are its definitions.
        req = meanings.pop(0)
        logging.info(f"Writing the result onto the output screen for {req}")
        if not meanings:
            rich_print("[yellow]Sorry, I didn't find any match.[/yellow]")
            logging.info(f"Amaq failed to find any match for {req}")
            continue
        rich_print(f"[blue]{req.capitalize()}[/blue] stands for : \n")
        for meaning in meanings:
            rich_print(f"[green]*** {meaning}[/green]")
def render(content, rich=True):
    """Print *content*; rich markup rendering when *rich* is true, plain otherwise."""
    printer = rich_print if rich else print
    printer(content)
def print(content):
    """Thin wrapper that delegates to rich's print for styled output."""
    rich_print(content)
def start():
    """CLI entry point: parse arguments and launch DockerENT (CLI or web UI).

    Fixes:
    - ``-p-nw``/``--nw-plugins`` was registered on the container argument
      group instead of the docker-network group.
    - typo "weather" -> "whether" in the audit flag's help text.
    - the streamlit subprocess was created without ``stdout=subprocess.PIPE``,
      so ``web_process.stdout.read()`` raised ``AttributeError`` on ``None``.
    - removed the unused local ``_log`` logger.
    """
    # Setup config for this package
    logger_config = config_parser.config['logger']
    logging.config.dictConfig(logger_config)

    # Read arguments for DockerENT application
    parser = argparse.ArgumentParser(
        prog='Find the vulnerabilities hidden in your running container(s).')

    docker_args_group = parser.add_argument_group()
    docker_args_group.add_argument(
        '-d', '--docker',
        nargs='?',
        dest='docker_container',
        const='all',
        help='Run scan against the running container.')
    docker_args_group.add_argument(
        '-p', '--plugins',
        nargs='?',
        dest='docker_plugins',
        const='all',
        help='Run scan with only specified plugins.')

    docker_nw_args_group = parser.add_argument_group()
    docker_nw_args_group.add_argument(
        '-d-nw', '--docker-network',
        nargs='?',
        dest='docker_network',
        const='all',
        help='Run scan against running docker-network.')
    # Network plugin selection belongs to the network group
    # (it was previously added to docker_args_group).
    docker_nw_args_group.add_argument(
        '-p-nw', '--nw-plugins',
        nargs='?',
        dest='docker_nw_plugins',
        const='all',
        help='Run scan with only specified plugins.')

    parser.add_argument(
        '-w', '--web-app',
        dest='web_app',
        action='store_true',
        default=False,
        help='Run DockerENT in WebApp mode. '
             'If this parameter is enabled, other command line flags will be ignored.')

    parser.add_argument('-n', '--process',
                        nargs='?',
                        dest='process_count',
                        default=2,
                        type=int,
                        help='Run scans in parallel (Process pool count).')

    parser.add_argument('-a', '--audit',
                        dest='audit',
                        action='store_true',
                        default=False,
                        help='Flag to check whether to audit results or not.')

    output_plugin = parser.add_argument_group()
    output_plugin.add_argument('-o', '--output',
                               nargs='?',
                               dest='output',
                               default='file',
                               type=str,
                               help='Output plugin to write data to.')

    args = parser.parse_args()

    process_count = args.process_count
    output = args.output
    audit = args.audit
    webapp = args.web_app
    docker_containers = args.docker_container
    docker_plugins = args.docker_plugins
    docker_nws = args.docker_network
    docker_nw_plugins = args.docker_nw_plugins

    # Register Signal Handler for graceful exit in case of Web application
    def sigterm_handler(_signo, _stack_frame):
        """Signal handler."""
        rich_print("[bold green]Thanks for using DockerENT[/bold green]")
        sys.exit(0)

    signal.signal(signal.SIGINT, sigterm_handler)

    # Start DockerENT
    # If webapp, start Streamlit else cli application.
    if webapp:
        rich_print('[bold green]Starting web application ...[/bold green]')
        web_app_file_name = os.path.dirname(DockerENT.__file__) + '/web_app.py'
        web_app_cmd = "streamlit run " + web_app_file_name
        # Capture stdout so it can be echoed; without PIPE, .stdout is None
        # and .read() would raise AttributeError.
        with subprocess.Popen(web_app_cmd.split(" "),
                              stdout=subprocess.PIPE) as web_process:
            rich_print(web_process.stdout.read())
    else:
        controller.main(docker_containers=docker_containers,
                        docker_plugins=docker_plugins,
                        docker_nws=docker_nws,
                        docker_nw_plugins=docker_nw_plugins,
                        process_count=process_count,
                        audit=audit,
                        output=output)
def sigterm_handler(_signo, _stack_frame):
    """Print a farewell message and terminate with a success status."""
    farewell = "[bold green]Thanks for using DockerENT[/bold green]"
    rich_print(farewell)
    sys.exit(0)