def list_stacks(stack_ref: List[str], all: bool, remote: str, region: str, watch: int, output: str):
    """List Lizzy stacks"""
    lizzy = setup_lizzy_client(remote)
    stack_references = parse_stack_refs(stack_ref)

    while True:
        rows = []
        for stack in lizzy.get_stacks(stack_references, region=region):
            creation_time = dateutil.parser.parse(stack['creation_time'])
            rows.append({'stack_name': stack['stack_name'],
                         'version': stack['version'],
                         'status': stack['status'],
                         'creation_time': creation_time.timestamp(),
                         'description': stack['description']})

        rows.sort(key=lambda x: (x['stack_name'], x['version']))
        with OutputFormat(output):
            print_table(
                'stack_name version status creation_time description'.split(),
                rows, styles=STYLES, titles=TITLES)

        if watch:  # pragma: no cover
            time.sleep(watch)
            click.clear()
        else:
            break
def run(commands, shell='/bin/bash', prompt_func=get_default_prompt,
        speed=1, test_mode=False):
    echof("We'll do it live!", fg='red', bold=True)
    echof('STARTING SESSION: Press ESC at any time to exit.',
          fg='yellow', bold=True)
    click.pause()
    click.clear()
    aliases, envvars = [], []
    for line in commands:
        command = line.strip()
        if not command:
            continue
        if command.startswith('#'):
            # Parse comment magic
            match = OPTION_RE.match(command)
            if match:
                option, arg = match.group('option'), match.group('arg')
                if option == 'prompt':
                    prompt_func = make_prompt_formatter(arg)
                elif option == 'shell':
                    shell = arg
                elif option == 'alias':
                    aliases.append(arg)
                elif option == 'env':
                    envvars.append(arg)
                elif option == 'speed':
                    speed = int(arg)
            continue
        magictype(command, shell,
                  prompt_func=prompt_func,
                  aliases=aliases,
                  envvars=envvars,
                  speed=speed,
                  test_mode=test_mode)
    prompt = prompt_func()
    echo(prompt + ' ', nl=False)
    wait_for(RETURNS)
    echof("FINISHED SESSION", fg='yellow', bold=True)
def compose(cards, cards_per_page, confirm):
    click.clear()
    print('\n')
    print(click.style(' ', bg='blue'))
    print(click.style(' BINGO CARD COMPOSER ', bg='blue', fg='white'))
    print(click.style(' ', bg='blue'))
    print('\n')

    cards = os.path.abspath(cards)

    # Let the user confirm the options
    d = {
        'cards': cards,
        'cards_per_page': cards_per_page
    }
    d = ['%s %s' % (click.style('%s' % k, fg='red'), d[k]) for k in d.keys()]
    d = '\n'.join(d)
    if confirm:
        click.confirm(click.style('\nDo you want to continue with the following options?',
                                  fg='green') + '\n\n%s\n' % d, abort=True)
    else:
        print(click.style('\nWriting', fg='green') + '\n\n%s\n' % d)

    # Compose pdf.
    composer = Composer(cards, cards_per_page)
    composer.write()

    print('\n')
    print(click.style(' ', bg='green'))
    print(click.style(' SUCCESS ', bg='green', fg='black'))
    print(click.style(' ', bg='green'))
    print('\n')
def watching(w, watch, max_count=None, clear=True):
    """
    >>> len(list(watching(True, 1, 0)))
    1

    >>> len(list(watching(True, 1, 1)))
    2

    >>> len(list(watching(True, None, 0)))
    1
    """
    if w and not watch:
        watch = 2
    if watch and clear:
        click.clear()
    yield 0

    if max_count is not None and max_count < 1:
        return

    counter = 1
    while watch and counter <= (max_count or counter):
        time.sleep(watch)
        counter += 1
        if clear:
            click.clear()
        yield 0
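# A minimal usage sketch (not part of the original source) for the `watching`
# generator above: each yielded value marks a redraw tick. `render_status` is a
# hypothetical callable standing in for whatever the caller prints each cycle.
def watch_status(render_status, watch_interval=2):
    # with max_count=None and a truthy interval this loops until interrupted,
    # clearing the screen between redraws
    for _ in watching(True, watch_interval, max_count=None, clear=True):
        render_status()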
def get_routingconfig_subdomain():
    click.clear()
    message = """
You might want to override the default subdomain used for exposed routes. If
you don't know what this is, use the default value.
"""
    click.echo(message)
    return click.prompt('New default subdomain (ENTER for none)', default='')
def main():
    """Summarize text."""
    # todo: use argparse or click or something like that
    input_dict = get_input()

    summarizer = Summarizer()
    result = summarizer.summarize(
        input_dict['text'],
        input_dict['title'],
        'Undefined',
        'Undefined',
    )
    result = summarizer.sortScore(result)
    result = summarizer.sortSentences(result[:30])

    # todo: paginate this output
    click.clear()
    summary = "Summary of '%s':\n\n" % input_dict['title']
    for r in result:
        summary += r['sentence'] + "\n\n"
    summary += "Press [q] to exit."
    click.echo_via_pager(summary)
    click.clear()
def main(user, flag):
    global _user
    global _league
    _user = user
    _league = user.get_league()

    if flag:
        click.clear()
        print('%s Managerial office' % _user.get_team().get_name())
        print('-'.ljust(39, '-'))

    nav = click.prompt('What would you like to do?',
                       type=click.Choice(['help', 'inbox', 'schedule', 'squad',
                                          'standings', 'personal', 'save', 'exit']))
    if nav == 'help':
        print('\nWhile in your office, you have a variety of resources available to you. You may:\n\n'
              'inbox : access any new mail you\'ve received, whether they be newsletters, injury news, player communications, or transfer offers\n'
              'schedule : take a look at upcoming games and past results of both your team and other teams in the league\n'
              'squad : check on how your players are doing, including their stats based on recent training sessions\n'
              'standings : see how your team ranks up on the table, along with other useful information and stats\n'
              'personal : see your own personal stats and information\n'
              'save : save your in-game progress\n'
              'exit : exit out of the game, although why would you do that?\n')
        main(_user, False)
    elif nav == 'exit':
        pass
    elif nav == 'inbox':
        inbox()
    elif nav == 'schedule':
        schedule()
    elif nav == 'squad':
        squad()
    elif nav == 'standings':
        standings()
    elif nav == 'personal':
        personal()
    elif nav == 'save':
        save()
def check_hosts_config(oo_cfg):
    click.clear()
    masters = [host for host in oo_cfg.hosts if host.master]
    if len(masters) > 1:
        master_lb = [host for host in oo_cfg.hosts if host.master_lb]
        if len(master_lb) > 1:
            click.echo('More than one Master load balancer specified. Only one is allowed.')
            sys.exit(0)
        elif len(master_lb) == 1:
            if master_lb[0].master or master_lb[0].node:
                click.echo('The Master load balancer is configured as a master or node. Please correct this.')
                sys.exit(0)
        else:
            message = """
No HAProxy given in config. Either specify one or provide a load balancing
solution of your choice to balance the master API (port 8443) on all master
hosts.

https://docs.openshift.org/latest/install_config/install/advanced_install.html#multiple-masters
"""
            confirm_continue(message)

    nodes = [host for host in oo_cfg.hosts if host.node]
    if len(masters) == len(nodes):
        message = """
No dedicated Nodes specified. By default, colocated Masters have their Nodes
set to unschedulable. Continuing at this point will label all nodes as
schedulable.
"""
        confirm_continue(message)

    return
def cli(recursive, path):
    if not path:
        path = '.'

    click.clear()
    cprint(figlet_format('lsBranch', width=120), 'red')
    click.secho(' ' * 25 + 'by Ivan Arar', fg='red')

    lsbranch = lsBranch(path=path)
    click.echo()
    click.echo('-' * lsbranch.terminal_size)
    click.echo()

    lsbranch.search(recursive=recursive)

    click.echo('-' * lsbranch.terminal_size)
    click.echo()
    click.secho('Went through ' + str(lsbranch.counter_all()) + ' directories and found ' +
                str(lsbranch.counter_git()) + ' git repositories!',
                fg='blue', bold=True, blink=True)
    click.echo()
    click.echo('-' * lsbranch.terminal_size)
    click.echo()
def cli():
    # Prompt for a bus line name and search matching lines.
    q = click.prompt('请输入线路名', value_proc=str)  # "Please enter a line name"
    lines = BeijingBus.search_lines(q)
    for index, line in enumerate(lines):
        click.echo()
        click.secho('[%s] %s' % (index + 1, line.name), bold=True, underline=True)
        station_names = [s.name for s in line.stations]
        click.echo()
        click.echo('站点列表:%s' % ','.join(station_names))  # "Station list: ..."
    click.echo()

    q = click.prompt('请从结果中选择线路编号', type=int)  # "Choose a line number from the results"
    line = lines[q - 1]

    click.clear()
    click.echo('你选择了 %s,下面请选择站点' % line.name)  # "You chose %s, now pick a station"
    click.echo()
    for index, station in enumerate(line.stations):
        click.echo('[%s] %s' % (index + 1, station.name))
    click.echo()
    q = click.prompt('请从结果中选择线路编号', type=int)  # "Choose a number from the results"

    while True:
        echo_realtime_data(line, q)
        time.sleep(5)
def print_status_loop():
    while True:
        click.clear()
        columns = 'header', 'state', 'description'
        rows = map(juxt(*columns), incidents.values())
        click.echo(tabulate.tabulate(rows, headers=columns))
        time.sleep(1)
def show():
    click.clear()
    term_width, term_height = click.get_terminal_size()
    canvas = make_canvas(term_width, term_height - 1)
    chart_width = min(20, term_width - 20)

    m = metrics.get_all_metrics()
    max_count = max(m.value for m in m.values())
    scale = get_scale(max_count)
    ratio = chart_width * 1.0 / scale

    t = []
    headers = ['param', 'value', 'chart']
    keys = sorted(m.keys())
    for key in keys:
        metric = m[key]
        count = metric.value
        color = green if metric.mood == metrics.MOOD_HAPPY else red
        chart = color('|' + u'█' * int(ratio * count))
        t.append([key, count, chart])

    place(canvas, LOGO2, 0, 0)
    s = tabulate(t, headers=headers, tablefmt='simple')
    place(canvas, s, 25, 0)
    render(canvas)
    time.sleep(cfg.CONF.interval)
def _print_stats_dashboard(self, statistics):
    if self.interval:
        click.clear()

    click.echo()
    click.echo("Django RQ CLI Dashboard")
    click.echo()
    self._print_separator()

    # Header
    click.echo(
        """| %-15s|%10s |%10s |%10s |%10s |%10s |""" %
        ("Name", "Queued", "Active", "Deferred", "Finished", "Workers")
    )
    self._print_separator()

    # Print every queue in a row
    for queue in statistics["queues"]:
        click.echo(
            """| %-15s|%10s |%10s |%10s |%10s |%10s |""" %
            (queue["name"], queue["jobs"], queue["started_jobs"],
             queue["deferred_jobs"], queue["finished_jobs"],
             queue["workers"])
        )
    self._print_separator()

    if self.interval:
        click.echo()
        click.echo("Press 'Ctrl+c' to quit")
def collect_hosts():
    """
        Collect host information from user. This will later be filled in using
        ansible.

        Returns: a list of host information collected from the user
    """
    click.clear()
    click.echo('***Host Configuration***')
    message = """
The OpenShift Master serves the API and web console. It also coordinates the
jobs that have to run across the environment. It can even run the datastore.
For wizard based installations the database will be embedded. It's possible to
change this later using etcd from Red Hat Enterprise Linux 7.

Any Masters configured as part of this installation process will also be
configured as Nodes. This is so that the Master will be able to proxy to Pods
from the API. By default this Node will be unschedulable but this can be
changed after installation with 'oadm manage-node'.

The OpenShift Node provides the runtime environments for containers. It will
host the required services to be managed by the Master.

http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/3.0/architecture/infrastructure_components/kubernetes_infrastructure.html#node
    """
    click.echo(message)

    hosts = []
    more_hosts = True
    ip_regex = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')

    while more_hosts:
        host_props = {}
        hostname_or_ip = click.prompt('Enter hostname or IP address:',
                                      default='',
                                      value_proc=validate_prompt_hostname)

        if ip_regex.match(hostname_or_ip):
            host_props['ip'] = hostname_or_ip
        else:
            host_props['hostname'] = hostname_or_ip

        host_props['master'] = click.confirm('Will this host be an OpenShift Master?')
        host_props['node'] = True

        rpm_or_container = click.prompt('Will this host be RPM or Container based (rpm/container)?',
                                        type=click.Choice(['rpm', 'container']),
                                        default='rpm')
        if rpm_or_container == 'container':
            host_props['containerized'] = True
        else:
            host_props['containerized'] = False

        host = Host(**host_props)

        hosts.append(host)

        more_hosts = click.confirm('Do you want to add additional hosts?')
    return hosts
def navigate_to(self, going):
    logger.debug('navigating to: %s' % going)
    self.session.process_events()
    going.initialize()
    while self.navigating:
        click.clear()
        self.print_header()
        self.print_menu(going.get_ui())
        response = going.get_response()
        if callable(response):
            response = response()
            logger.debug('Got response %s after evaluation' % response)
        if response == responses.QUIT:
            click.clear()
            click.echo('Thanks, bye!')
            self.navigating = False
            return
        elif response == responses.UP:
            break
        elif response == responses.NOOP:
            continue
        elif response == responses.PLAYER:
            self.navigate_to(self.player)
        elif response != going:
            self.navigate_to(response)
        # This happens when the `going` instance gets control again. We
        # don't want to remember the query and we want to rebuild the
        # menu's options
        # (and possibly something else?)
        going.initialize()
def mantenimiento(tipo):
    # Maintenance-mode selector: 'ope' picks operational mode, anything else simulation.
    click.clear()
    if tipo == 'ope':
        click.echo(click.style('=> Seleccionado modo OPERACIONAL',  # "OPERATIONAL mode selected"
                               fg='green', bold=True, reverse=True))
        leer_archivo('./operational')
    else:
        click.secho('=> Seleccionado modo SIMULACION',  # "SIMULATION mode selected"
                    fg='yellow', bold=True, reverse=True)
        leer_archivo('./simulation')
def print_header(artist, album, current, total):
    click.clear()
    title = u"{0} - {1}".format(artist, album)
    progress = u"{0}/{1}".format(current + 1, total)
    term_width, term_height = click.get_terminal_size()
    pad_width = (term_width - len(title) - len(progress)) - 10
    header_template = u"=== {title} {pad} {progress} ==="
    click.secho(header_template.format(title=title,
                                       pad="=" * pad_width,
                                       progress=progress),
                bg="blue", fg="white")
def collect_new_nodes(oo_cfg):
    click.clear()
    click.echo('*** New Node Configuration ***')
    message = """
Add new nodes here
    """
    click.echo(message)
    return collect_hosts(oo_cfg, masters_set=True, print_summary=False)
def collect_new_nodes():
    click.clear()
    click.echo('***New Node Configuration***')
    message = """
Add new nodes here
    """
    click.echo(message)
    return collect_hosts()
def setup(message='Hello. Would you like to continue with setup?'):
    click.clear()
    welcome.setup_text()
    click_helper = ClickHelper()
    if click_helper.yes_no(message):
        click.echo('Setup complete.')
    else:
        click.echo('Setup canceled.')
def get_ansible_ssh_user():
    click.clear()
    message = """
This installation process involves connecting to remote hosts via ssh. Any
account may be used. However, if a non-root account is used, then it must have
passwordless sudo access.
"""
    click.echo(message)
    return click.prompt('User for ssh access', default='root')
def collect_hosts(master_set=False):
    """
        Collect host information from user. This will later be filled in using
        ansible.

        Returns: a list of host information collected from the user
    """
    click.clear()
    click.echo("***Host Configuration***")
    message = """
The OpenShift Master serves the API and web console. It also coordinates the
jobs that have to run across the environment. It can even run the datastore.
For wizard based installations the database will be embedded. It's possible to
change this later using etcd from Red Hat Enterprise Linux 7.

Any Masters configured as part of this installation process will also be
configured as Nodes. This is so that the Master will be able to proxy to Pods
from the API. By default this Node will be unschedulable but this can be
changed after installation with 'oadm manage-node'.

The OpenShift Node provides the runtime environments for containers. It will
host the required services to be managed by the Master.

http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#node
    """
    click.echo(message)

    hosts = []
    more_hosts = True

    while more_hosts:
        host_props = {}
        hostname_or_ip = click.prompt("Enter hostname or IP address:",
                                      default="",
                                      value_proc=validate_prompt_hostname)

        host_props["connect_to"] = hostname_or_ip

        if not master_set:
            is_master = click.confirm("Will this host be an OpenShift Master?")
            host_props["master"] = is_master
            master_set = is_master

        host_props["node"] = True

        # TODO: Reenable this option once container installs are out of tech preview
        # rpm_or_container = click.prompt('Will this host be RPM or Container based (rpm/container)?',
        #                                 type=click.Choice(['rpm', 'container']),
        #                                 default='rpm')
        # if rpm_or_container == 'container':
        #     host_props['containerized'] = True
        # else:
        #     host_props['containerized'] = False
        host_props["containerized"] = False

        host = Host(**host_props)

        hosts.append(host)

        more_hosts = click.confirm("Do you want to add additional hosts?")

    return hosts
def generate(output, count):
    output = '../{}.txt'.format(output)
    with open(output, 'w') as fp:
        for _ in range(count):
            name = random.choice([style1, style2])()
            fp.write(re.sub(' +', ' ', name).lower() + '\n')
    click.clear()
    print('{} names generated'.format(count))
def _print(self, limit):
    click.clear()
    cprint(figlet_format('Croatian Tax Debtors', width=120), 'red')
    click.echo()

    categories = self.data['counters'].keys()
    number_of_categories = len(categories)

    screen_line_parts = []
    for cat in self.data['width'].keys():
        screen_line_parts.append('-' * (self.data['width'][cat] + 16))
        screen_line_parts.append(' ' * 4)
    screen_line = ''.join(screen_line_parts)

    if self._all_count:
        all_count = '/' + str(self._all_count)
    else:
        all_count = ''

    for i, category in enumerate(categories):
        cat_width = self.data['width'][category] + 20
        nl = True if i + 1 >= number_of_categories else False
        color = self.data['colors'][category]
        category += ' (' + str(self.data['counters'][category]) + all_count + ')'
        click.secho(category.upper() + (' ' * (cat_width - len(category))), nl=nl, fg=color)

    click.echo(screen_line)

    cat_line_width = {category: 0 for category in categories}
    for j in range(int(limit)):
        for i, category in enumerate(categories):
            if len(self.data['toplist'][category]) <= j:
                click.echo(' ' * cat_line_width[category])
            else:
                cat_width = self.data['width'][category]
                nl = True if i + 1 >= number_of_categories else False
                color = self.data['colors'][category]
                debtor = self.data['toplist'][category][j]
                dots = '.' * (cat_width - len(debtor[0]))
                line = click.style(debtor[0], fg=color) + ' ' + \
                    click.style(dots, dim=True, fg=color) + ' ' + \
                    click.style(debtor[1], fg=color)
                line_len = len(debtor[0]) + len(dots) + len(debtor[1]) + 2
                click.echo(line + ' ' * ((cat_width + 20) - line_len), nl=nl)
                cat_line_width[category] = len(line)

    click.echo(screen_line)
    click.echo()
def watching(watch: int):
    if watch:
        click.clear()
    yield 0
    if watch:
        while True:
            time.sleep(watch)
            click.clear()
            yield 0
def refresh(interval, func, *args):
    while True:
        if interval:
            click.clear()
        func(*args)
        if interval:
            time.sleep(interval)
        else:
            break
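# A minimal usage sketch (not in the original source) for the `refresh` helper
# above. `print_summary` and `session` are hypothetical stand-ins for the
# caller's own callable and its argument.
refresh(2, print_summary, session)   # clears and redraws every 2 seconds, until interrupted
refresh(0, print_summary, session)   # an interval of 0 prints once and returns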
def run_command(self):
    if self.clear:
        click.clear()
    if self.show:
        click.secho('$ {0}'.format(self.command), fg='cyan')
    exit_code = subprocess.call(self.command, shell=True)
    if self.verbose:
        click.echo("! Command '{0}' exited [{1}]".format(
            self.command, exit_code))
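# A minimal sketch (not part of the original source) of the owner object that
# run_command above expects: it reads self.command, self.clear, self.show and
# self.verbose. The class name and defaults are illustrative assumptions.
class WatchedCommand:
    def __init__(self, command, clear=True, show=True, verbose=False):
        self.command = command   # shell command string passed to subprocess.call
        self.clear = clear       # clear the screen before each run
        self.show = show         # echo the command being run
        self.verbose = verbose   # report the exit code afterwards
    # run_command(self), shown in the entry above, would be defined here.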
def _print_cli_header(build_id, commit_summary, url_build_report):
    """ Print a build-process header to the console. """
    click.clear()
    _msg('Build id:', build_id)            # Identify the build
    _msg('Last commit:', commit_summary)   # Provide some human-scale context.
    _msg('Report:', url_build_report)      # Link to more detailed info.
def get_ansible_ssh_user():
    click.clear()
    message = """
This installation process will involve connecting to remote hosts via ssh. Any
account may be used; however, if a non-root account is used, it must have
passwordless sudo access.
"""
    click.echo(message)
    return click.prompt("User for ssh access", default="root")
def collect_new_nodes(oo_cfg):
    click.clear()
    click.echo("*** New Node Configuration ***")
    message = """
Add new nodes here
    """
    click.echo(message)
    new_nodes, _ = collect_hosts(oo_cfg, existing_env=True,
                                 masters_set=True, print_summary=False)
    return new_nodes
def run(exchange: str, symbol: str, start_date_str: str, skip_confirmation=False):
    try:
        start_timestamp = jh.arrow_to_timestamp(arrow.get(start_date_str, 'YYYY-MM-DD'))
    except:
        raise ValueError(
            'start_date must be a string representing a date before today. ex: 2020-01-17')

    # more start_date validations
    today = arrow.utcnow().floor('day').timestamp * 1000
    if start_timestamp == today:
        raise ValueError(
            "Today's date is not accepted. start_date must be a string representing a date BEFORE today.")
    elif start_timestamp > today:
        raise ValueError(
            "Future's date is not accepted. start_date must be a string representing a date BEFORE today.")

    click.clear()
    symbol = symbol.upper()

    until_date = arrow.utcnow().floor('day')
    start_date = arrow.get(start_timestamp / 1000)
    days_count = jh.date_diff_in_days(start_date, until_date)
    candles_count = days_count * 1440
    exchange = exchange.title()

    try:
        driver: CandleExchange = drivers[exchange]()
    except KeyError:
        raise ValueError('entered exchange is not supported')

    loop_length = int(candles_count / driver.count) + 1
    time_to_finish = loop_length * driver.sleep_time / 60

    # ask for confirmation
    if not skip_confirmation:
        click.confirm(
            'Importing {} days candles from "{}" for "{}". Maximum time it\'ll take '
            'to finish:"{} minutes" (duplicates will be skipped). All good?'.format(
                days_count, exchange, symbol, time_to_finish),
            abort=True, default=True)

    with click.progressbar(length=loop_length, label='Importing candles...') as progressbar:
        for _ in range(candles_count):
            temp_start_timestamp = start_date.timestamp * 1000
            temp_end_timestamp = temp_start_timestamp + (driver.count - 1) * 60000

            # to make sure it won't try to import candles from the future! LOL
            if temp_start_timestamp > jh.now():
                break

            # prevent duplicate calls to boost performance
            count = Candle.select().where(
                Candle.timestamp.between(temp_start_timestamp, temp_end_timestamp),
                Candle.symbol == symbol,
                Candle.exchange == exchange).count()
            already_exists = count == driver.count

            if not already_exists:
                # it's today's candles if temp_end_timestamp < now
                if temp_end_timestamp > jh.now():
                    temp_end_timestamp = arrow.utcnow().floor('minute').timestamp * 1000 - 60000

                # fetch from market
                candles = driver.fetch(symbol, temp_start_timestamp)

                if not len(candles):
                    click.clear()
                    first_existing_timestamp = driver.get_starting_time(symbol)

                    # if driver can't provide accurate get_starting_time()
                    if first_existing_timestamp is None:
                        raise CandleNotFoundInExchange(
                            'No candles exist in the market for this day: {} \n'
                            'Try another start_date'.format(
                                jh.timestamp_to_time(temp_start_timestamp)[:10],
                            ))

                    # handle when there's missing candles during the period
                    if temp_start_timestamp > first_existing_timestamp:
                        # see if there are candles for the same date for the backup exchange,
                        # if so, get those, if not, download from that exchange.
                        driver.init_backup_exchange()
                        if driver.backup_exchange is not None:
                            candles = _get_candles_from_backup_exchange(
                                exchange, driver.backup_exchange, symbol,
                                temp_start_timestamp, temp_end_timestamp)
                    else:
                        if not skip_confirmation:
                            print(
                                jh.color(
                                    'No candle exists in the market for {}\n'.format(
                                        jh.timestamp_to_time(temp_start_timestamp)[:10]),
                                    'yellow'))
                            click.confirm(
                                'First present candle is since {}. Would you like to continue?'.format(
                                    jh.timestamp_to_time(first_existing_timestamp)[:10]),
                                abort=True, default=True)

                        run(exchange, symbol,
                            jh.timestamp_to_time(first_existing_timestamp)[:10], True)
                        return

                # fill absent candles (if there's any)
                candles = _fill_absent_candles(candles, temp_start_timestamp, temp_end_timestamp)

                # store in the database
                if skip_confirmation:
                    _insert_to_database(candles)
                else:
                    threading.Thread(target=_insert_to_database, args=[candles]).start()

            # add as much as driver's count to the temp_start_time
            start_date = start_date.shift(minutes=driver.count)

            progressbar.update(1)

            # sleep so that the exchange won't get angry at us
            if not already_exists:
                time.sleep(driver.sleep_time)
def draw(self, notification):
    click.clear()
    factory = self.find_factory(notification)
    click.echo(factory(self._size, notification), nl=False)
def pull_request(
    ctx,
    repo_dir,
    force_push,
    merge_after_pipeline,
    open_url,
    github_token,
    gitlab_token,
    reviewers,
):
    """
    A command to simplify pull request creation.

    1. Fetch remote changes
    2. Push to a remote branch
    3. Create pull request with Github or Gitlab
    """
    repo = Repo(repo_dir, search_parent_directories=True)
    ctx.obj = repo
    click.clear()

    remote_url = get_remote_url(ctx.obj)
    giturl = giturlparse.parse(remote_url)
    if not giturl.github and not giturl.gitlab:
        raise click.UsageError(
            f"This command only supports Github & Gitlab - remote: {remote_url}"
        )

    repo.remote().fetch()

    with Halo(text="Rebasing on master") as h:
        repo.git.rebase("origin/master")
        h.succeed()

    with Halo(text="Pushing changes", spinner="dots4") as h:
        try:
            repo.git.push(repo.remote().name, repo.active_branch.name, force=force_push)
        except GitCommandError as ex:
            if "your current branch is behind" in ex.stderr:
                h.stop()
                click.confirm(
                    ("Your branch is behind the remote. You can either "
                     "abort or force push. Do you want to force push?"),
                    abort=True,
                )
                repo.git.push(repo.remote().name, repo.active_branch.name, force=True)
                h.start()
            else:
                raise ex
        h.succeed()

    with Halo(text="Creating pull-request", spinner="dots5") as h:
        if giturl.github:
            pull_request_url = create_github_pull_request()
        elif giturl.gitlab:
            pull_request_url = create_gitlab_pull_request()
        h.succeed()

    click.echo(click.style(pull_request_url, fg="green", bold=True))

    if open_url:
        click.launch(pull_request_url)
def run(start_date: str, finish_date: str, candles=None, chart=False,
        tradingview=False, csv=False, json=False):
    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        print('loading candles...')
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        # print candles table
        key = '{}-{}'.format(config['app']['considering_candles'][0][0],
                             config['app']['considering_candles'][0][1])
        table.key_value(stats.candles(candles[key]['candles']), 'candles',
                        alignments=('left', 'right'))
        print('\n')

        # print routes table
        table.multi_value(stats.routes(router.routes))
        print('\n')

        # print guidance for debugging candles
        if jh.is_debuggable('trading_candles') or jh.is_debuggable('shorter_period_candles'):
            print(' Symbol | timestamp | open | close | high | low | volume')

    # run backtest simulation
    simulator(candles)

    if not jh.should_execute_silently():
        # print trades statistics
        if store.completed_trades.count > 0:
            print('\n')
            table.key_value(report.portfolio_metrics(), 'Metrics',
                            alignments=('left', 'right'))
            print('\n')

            # save logs
            store_logs(json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns()
        else:
            print(jh.color('No trades were made.', 'yellow'))
def get_missing_info_from_user(oo_cfg):
    """ Prompts the user for any information missing from the given configuration. """
    click.clear()

    message = """
Welcome to the OpenShift Enterprise 3 installation.

Please confirm that following prerequisites have been met:

* All systems where OpenShift will be installed are running Red Hat Enterprise
  Linux 7.
* All systems are properly subscribed to the required OpenShift Enterprise 3
  repositories.
* All systems have run docker-storage-setup (part of the Red Hat docker RPM).
* All systems have working DNS that resolves not only from the perspective of
  the installer, but also from within the cluster.

When the process completes you will have a default configuration for masters
and nodes. For ongoing environment maintenance it's recommended that the
official Ansible playbooks be used.

For more information on installation prerequisites please see:
https://docs.openshift.com/enterprise/latest/admin_guide/install/prerequisites.html
"""
    confirm_continue(message)
    click.clear()

    if not oo_cfg.deployment.variables.get('ansible_ssh_user', False):
        oo_cfg.deployment.variables['ansible_ssh_user'] = get_ansible_ssh_user()
        click.clear()

    if not oo_cfg.settings.get('variant', ''):
        variant, version = get_variant_and_version()
        oo_cfg.settings['variant'] = variant.name
        oo_cfg.settings['variant_version'] = version.name
        oo_cfg.settings['variant_subtype'] = version.subtype
        click.clear()

    if not oo_cfg.deployment.hosts:
        oo_cfg.deployment.hosts, roles = collect_hosts(oo_cfg)
        set_infra_nodes(oo_cfg.deployment.hosts)

        for role in roles:
            oo_cfg.deployment.roles[role] = Role(name=role, variables={})
        click.clear()

    if 'master_routingconfig_subdomain' not in oo_cfg.deployment.variables:
        oo_cfg.deployment.variables['master_routingconfig_subdomain'] = \
            get_routingconfig_subdomain()
        click.clear()

    # Are any proxy vars already persisted?
    proxy_vars = ['proxy_exclude_hosts', 'proxy_https', 'proxy_http']
    # Empty list if NO proxy vars were persisted
    saved_proxy_vars = [pv for pv in proxy_vars
                        if oo_cfg.deployment.variables.get(pv, 'UNSET') != 'UNSET']

    INSTALLER_LOG.debug("Evaluated proxy settings, found %s persisted values",
                        len(saved_proxy_vars))
    current_version = parse_version(oo_cfg.settings.get('variant_version', '0.0'))
    min_version = parse_version('3.2')

    # No proxy vars were saved and we are running a version which
    # recognizes proxy parameters. We must prompt the user for values
    # if this conditional is true.
    if not saved_proxy_vars and current_version >= min_version:
        INSTALLER_LOG.debug("Prompting user to enter proxy values")
        http_proxy, https_proxy, proxy_excludes = get_proxy_hosts_excludes()
        oo_cfg.deployment.variables['proxy_http'] = http_proxy
        oo_cfg.deployment.variables['proxy_https'] = https_proxy
        oo_cfg.deployment.variables['proxy_exclude_hosts'] = proxy_excludes
        click.clear()

    return oo_cfg
def run(start_date: str, finish_date: str, candles=None, chart=False,
        tradingview=False, csv=False, json=False):
    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        print('loading candles...')
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        # print candles table
        key = '{}-{}'.format(config['app']['considering_candles'][0][0],
                             config['app']['considering_candles'][0][1])
        table.key_value(stats.candles(candles[key]['candles']), 'candles',
                        alignments=('left', 'right'))
        print('\n')

        # print routes table
        table.multi_value(stats.routes(router.routes))
        print('\n')

        # print guidance for debugging candles
        if jh.is_debuggable('trading_candles') or jh.is_debuggable('shorter_period_candles'):
            print(' Symbol | timestamp | open | close | high | low | volume')

    # run backtest simulation
    simulator(candles)

    if not jh.should_execute_silently():
        # print trades statistics
        if store.completed_trades.count > 0:
            change = []
            # calculate market change
            for e in router.routes:
                if e.strategy is None:
                    return

                first = Candle.select(Candle.close).where(
                    Candle.timestamp == jh.date_to_timestamp(start_date),
                    Candle.exchange == e.exchange,
                    Candle.symbol == e.symbol).first()

                last = Candle.select(Candle.close).where(
                    Candle.timestamp == jh.date_to_timestamp(finish_date) - 60000,
                    Candle.exchange == e.exchange,
                    Candle.symbol == e.symbol).first()

                change.append(((last.close - first.close) / first.close) * 100.0)

            data = report.portfolio_metrics()
            data.append(['Market Change', str(round(np.average(change), 2)) + "%"])
            print('\n')
            table.key_value(data, 'Metrics', alignments=('left', 'right'))
            print('\n')

            # save logs
            store_logs(json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns()
        else:
            print(jh.color('No trades were made.', 'yellow'))
def collect_hosts(oo_cfg, existing_env=False, masters_set=False, print_summary=True):
    """
        Collect host information from user. This will later be filled in using
        Ansible.

        Returns: a list of host information collected from the user
    """
    click.clear()
    click.echo('*** Host Configuration ***')
    message = """
You must now specify the hosts that will compose your OpenShift cluster.

Please enter an IP address or hostname to connect to for each system in the
cluster. You will then be prompted to identify what role you want this system to
serve in the cluster.

OpenShift masters serve the API and web console and coordinate the jobs to run
across the environment. Optionally, you can specify multiple master systems for
a high-availability (HA) deployment. If you choose an HA deployment, then you
are prompted to identify a *separate* system to act as the load balancer for
your cluster once you define all masters and nodes.

Any masters configured as part of this installation process are also
configured as nodes. This enables the master to proxy to pods
from the API. By default, this node is unschedulable, but this can be changed
after installation with the 'oadm manage-node' command.

OpenShift nodes provide the runtime environments for containers. They host the
required services to be managed by the master.

http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#master
http://docs.openshift.com/enterprise/latest/architecture/infrastructure_components/kubernetes_infrastructure.html#node
    """
    click.echo(message)

    hosts = []
    roles = set(['master', 'node', 'storage', 'etcd'])
    more_hosts = True
    num_masters = 0
    while more_hosts:
        host_props = {}
        host_props['roles'] = []
        host_props['connect_to'] = click.prompt('Enter hostname or IP address',
                                                value_proc=validate_prompt_hostname)

        if not masters_set:
            if click.confirm('Will this host be an OpenShift master?'):
                host_props['roles'].append('master')
                host_props['roles'].append('etcd')
                num_masters += 1

                if oo_cfg.settings['variant_version'] == '3.0':
                    masters_set = True

        host_props['roles'].append('node')

        host_props['containerized'] = False
        if oo_cfg.settings['variant_version'] != '3.0':
            rpm_or_container = \
                click.prompt('Will this host be RPM or Container based (rpm/container)?',
                             type=click.Choice(['rpm', 'container']),
                             default='rpm')
            if rpm_or_container == 'container':
                host_props['containerized'] = True

        host_props['new_host'] = existing_env

        host = Host(**host_props)
        hosts.append(host)

        if print_summary:
            print_installation_summary(hosts, oo_cfg.settings['variant_version'])

        # If we have one master, this is enough for an all-in-one deployment,
        # thus we can start asking if you want to proceed. Otherwise we assume
        # you must.
        if masters_set or num_masters != 2:
            more_hosts = click.confirm('Do you want to add additional hosts?')

    if num_masters > 2:
        master_lb = collect_master_lb(hosts)
        if master_lb:
            hosts.append(master_lb)
            roles.add('master_lb')
    else:
        set_cluster_hostname(oo_cfg)

    if not existing_env:
        collect_storage_host(hosts)

    return hosts, roles
import json
import os

import pandas as pd
import requests
import click

# Constants
ROOT = os.path.dirname(os.path.dirname(__file__))
DIR = 'csv_files'
PATH = os.path.join(ROOT, DIR)

os.makedirs(PATH, exist_ok=True)

click.clear()
click.echo(
    click.style(
        "For security reasons, an AUTHORIZATION TOKEN is necessary to run this script\n",
        fg='yellow'))
AUTH_KEY = click.prompt(click.style('AUTHORIZATION KEY', fg='green'), type=str)

URL_DICT = {
    'price-region': '/external/v1/parr/price-regions',
    'sell-price-rules': '/external/v1/parr/seasons-sell',
    'listings': '/external/v1/content/listings',
    'listing-sell-price': '/external/v1/parr/listing-rates-sell'
}

start_date = '2021-06-01'
def clear():
    """Clears the entire screen."""
    click.clear()
def num_choice(choices, default='1', valid_keys='', depth=1, icons='', sn_info=None, indent=4, fg_color='green', separator='', with_img=6, img_list=None, img_cache_dir='/tmp', use_cache=False, extra_hints='', clear_previous=False, quit_app=True, ): """ 传入数组, 生成待选择列表, 如果启用图片支持, 需要额外传入与数组排序一致的图片列表, - 图片在 iterms 中显示速度较慢, 不推荐使用 .. note: 图片在 iterms 中显示速度较慢, 如果数组长度大于10, 不推荐使用 .. code:: python sn_info = { 'align': '-', # 左右对齐 'length': 2, # 显示长度 } :param use_cache: :type use_cache: :param default: :type default: :param indent: ``左侧空白`` :type indent: :param fg_color: ``前景色`` :type fg_color: :param choices: 备选选项 :type choices: list :param depth: ``如果是嵌套数组, 显示当前层级`` :type depth: int :param icons: ``默认展示的icons: '❶❷❸❹❺❻❼❽❾❿'`` :type icons: any :param sn_info: ``需要展示的序号的信息长度对齐方式, 默认2个字符/右对齐`` :type sn_info: dict :param valid_keys: ``可以输入的有效 key, 使用 ',' 分隔`` :type valid_keys: str :param separator: 分隔符 header/footer, 默认无, 如果不为空, 则显示 :type separator: :param img_cache_dir: ``图片缓存目录`` :type img_cache_dir: str :param with_img: ``是否使用图片, 如果值大于0, 则以实际值大小来作为终端显示行数`` :type with_img: int :param img_list: ``图片原始 url `` :type img_list: list :param extra_hints: ``n-next,p-prev,s-skip`` :type extra_hints: any :param clear_previous: ``clear previous output`` :type clear_previous: :return: :rtype: """ icons = ICONS if not icons else icons if not choices: return None # warn: 这里需要使用 None, 不能 not default 来判断!!!, 会可能传入 0 if default is not None: default = '{}'.format(default) sn_info = sn_info or {} _header, _footer = gen_separator(separator=separator) with textui.indent(indent, quote=' {}'.format(icons[depth - 1])): if _header: textui.puts(getattr(textui.colored, fg_color)(_header)) for i, choice in enumerate(choices, start=1): if with_img > 0 and img_list: cat_net_img(img_list[i - 1], indent=indent, img_height=with_img, img_cache_dir=img_cache_dir, use_cache=use_cache) _align = '{}{}'.format(sn_info.get('align', ''), sn_info.get('length', 2)) # _hint = '%{}s. %s'.format(_align) % (i, choice) _hint_num = '%{}s.'.format(_align) % i _hint = '[{}]'.format(_hint_num) _hint = textui.colored.magenta(_hint) _hint += getattr(textui.colored, fg_color)(' %s' % choice) textui.puts(_hint) if _footer: textui.puts(getattr(textui.colored, fg_color)(_footer)) _valid = [str(x + 1) for x in range(0, len(choices))] default_prompt = 'Your Choice' valid_choices = ['q-quit', 'b-back'] if extra_hints: if isinstance(extra_hints, str): extra_hints = extra_hints.split(',') valid_choices += extra_hints default_prompt = '{}({})?'.format(default_prompt, '/'.join(valid_choices)) c = click.prompt( # click.style('[Depth: ({})]Your Choice(q-quit/b-back)?', fg='cyan').format(depth), click.style(default_prompt, fg='cyan'), type=str, default=default ) if str(c) in 'qQ': if quit_app: os._exit(0) else: if clear_previous: click.clear() return str(c) if valid_keys == 'all': return c elif str(c) in 'bB': if clear_previous: click.clear() return str(c) elif valid_keys and str(c) in valid_keys.split(','): return str(c) elif c not in _valid: textui.puts(textui.colored.red(' 😭 ✘ Invalid input[{}]'.format(c))) return num_choice( choices, default, valid_keys, depth, icons, sn_info, indent, fg_color, separator, with_img, img_list, img_cache_dir, use_cache, extra_hints, clear_previous, quit_app, ) else: return int(c) - 1
def generate_initial_population(self) -> None: """ generates the initial population """ loop_length = int(self.population_size / self.cpu_cores) with click.progressbar( length=loop_length, label='Generating initial population...') as progressbar: for i in range(loop_length): people = [] with Manager() as manager: dna_bucket = manager.list([]) workers = [] def get_fitness(dna: str, dna_bucket: list) -> None: try: fitness_score, fitness_log_training, fitness_log_testing = self.fitness( dna) dna_bucket.append( (dna, fitness_score, fitness_log_training, fitness_log_testing)) except Exception as e: proc = os.getpid() logger.error(f'process failed - ID: {str(proc)}') logger.error("".join( traceback.TracebackException.from_exception( e).format())) raise e try: for _ in range(self.cpu_cores): dna = ''.join( choices(self.charset, k=self.solution_len)) w = Process(target=get_fitness, args=(dna, dna_bucket)) w.start() workers.append(w) # join workers for w in workers: w.join() if w.exitcode > 0: logger.error( f'a process exited with exitcode: {str(w.exitcode)}' ) except KeyboardInterrupt: print(jh.color('Terminating session...', 'red')) # terminate all workers for w in workers: w.terminate() # shutdown the manager process manually since garbage collection cannot won't get to do it for us manager.shutdown() # now we can terminate the main session safely jh.terminate_app() except: raise for d in dna_bucket: people.append({ 'dna': d[0], 'fitness': d[1], 'training_log': d[2], 'testing_log': d[3] }) # update dashboard click.clear() progressbar.update(1) print('\n') table_items = [ [ 'Started at', jh.timestamp_to_arrow(self.start_time).humanize() ], [ 'Index', f'{len(self.population)}/{self.population_size}' ], [ 'errors/info', f'{len(store.logs.errors)}/{len(store.logs.info)}' ], [ 'Trading Route', f'{router.routes[0].exchange}, {router.routes[0].symbol}, {router.routes[0].timeframe}, {router.routes[0].strategy_name}' ], # TODO: add generated DNAs? # ['-'*10, '-'*10], # ['DNA', people[0]['dna']], # ['fitness', round(people[0]['fitness'], 6)], # ['training|testing logs', people[0]['log']], ] if jh.is_debugging(): table_items.insert( 3, ['Population Size', self.population_size]) table_items.insert(3, ['Iterations', self.iterations]) table_items.insert(3, ['Solution Length', self.solution_len]) table_items.insert(3, ['-' * 10, '-' * 10]) table.key_value(table_items, 'Optimize Mode', alignments=('left', 'right')) # errors if jh.is_debugging() and len(report.errors()): print('\n') table.key_value(report.errors(), 'Error Logs') for p in people: self.population.append(p) # sort the population self.population = list( sorted(self.population, key=lambda x: x['fitness'], reverse=True))
def draw_board(game, message=FlashMessage()):
    """
    Present the game status with pictures.

    - Clears the screen.
    - Flashes any messages.
    - Zip the two halves of the picture together.

    .. code-block:: text

        +---------------------------------------------+
        |                message 45 x 1               |
        +---------------------------------------------+
        |                 title 45 x 1                |
        +----------+----------------------------------+
        |          |                                  |
        |          |                                  |
        |          |                                  |
        |          |                                  |
        | picture  |             misses               |
        | 10 x 10  |            35 x 10               |
        |          |                                  |
        |          |                                  |
        |          |                                  |
        |          |                                  |
        +----------+----------------------------------+
        |                 hits 45 x 1                 |
        +---------------------------------------------+

        Dare to pick a letter: _

    **Example output:**

    .. code-block:: text

                      HANGMAN GAME
         _____
         |   |
         |   |
         |                 MISSES:
         |                 _ _ _ _ _ _ _ _ _ _
         |
         |
        ________|_

                  _ _ _ _ _ _ _ _

        Dare to pick a letter: _

    :param hangman.Hangman game: game instance
    :param hangman.utils.FlashMessage message: flash message
    :raises: hangman.utils.GameOverNotificationComplete
    """
    # setup
    click.clear()
    partial_picture = build_partial_picture(game.remaining_turns)
    partial_misses = build_partial_misses(game.misses)

    # print
    print_partial_message(message, game.answer)
    print_partial_title()
    print_partial_body(partial_picture, partial_misses)
    print_partial_hits(game.status)

    # raise to break game loop
    if message.game_lost or message.game_won:
        raise GameOverNotificationComplete
def list_animes(watcher, quality, download_dir):
    watcher.list()
    inp = click.prompt('Select an anime', default=1)
    try:
        anime = watcher.get(int(inp) - 1)
    except IndexError:
        sys.exit(0)

    # Make the selected anime first result
    watcher.update(anime)

    while True:
        click.clear()
        click.secho('Title: ' + click.style(anime.title, fg='green', bold=True))
        click.echo('episodes_done: {}'.format(click.style(
            str(anime.episodes_done), bold=True, fg='yellow')))
        click.echo('Length: {}'.format(len(anime)))
        click.echo('Provider: {}'.format(anime.sitename))
        meta = ''
        for k, v in anime.meta.items():
            meta += '{}: {}\n'.format(k, click.style(v, bold=True))
        click.echo(meta)
        click.echo('Available Commands: set, remove, update, watch,'
                   ' download.\n')

        inp = click.prompt('Press q to exit', default='q').strip()

        # TODO: A better way to handle commands. Use regex. Refactor to class?
        # Decorator?
        if inp == 'q':
            break
        elif inp == 'remove':
            watcher.remove(anime)
            break
        elif inp == 'update':
            watcher.update_anime(anime)
        elif inp == 'watch':
            anime.quality = quality
            watch_anime(watcher, anime)
            sys.exit(0)
        elif inp.startswith('download'):
            try:
                inp = inp.split('download ')[1]
            except IndexError:
                inp = ':'
            inp = str(anime.episodes_done + 1) + inp if inp.startswith(':') else inp
            inp = inp + str(len(anime)) if inp.endswith(':') else inp

            anime = util.split_anime(anime, inp)
            if not download_dir:
                download_dir = Config['dl']['download_dir']

            for episode in anime:
                episode.download(force=False,
                                 path=Config['dl']['download_dir'],
                                 format=Config['dl']['file_format'])
        elif inp.startswith('set '):
            inp = inp.split('set ')[-1]
            key, val = [v.strip() for v in inp.split('=')]
            key = key.lower()

            if key == 'title':
                watcher.remove(anime)
                setattr(anime, key, val)
                watcher.add(anime)
            elif key == 'episodes_done':
                setattr(anime, key, int(val))
                watcher.update(anime)
            elif key == 'provider':
                url = util.search(anime.title, val)
                watcher.remove(anime)
                newanime = watcher.new(url)
                newanime.episodes_done = anime.episodes_done
                newanime._timestamp = anime._timestamp
                watcher.update(newanime)
                anime = newanime
def run(start_date: str, finish_date: str, candles: Dict[str, Dict[str, Union[str, np.ndarray]]] = None, chart: bool = False, tradingview: bool = False, full_reports: bool = False, csv: bool = False, json: bool = False) -> None: # clear the screen if not jh.should_execute_silently(): click.clear() # validate routes validate_routes(router) # initiate candle store store.candles.init_storage(5000) # load historical candles if candles is None: print('loading candles...') candles = load_candles(start_date, finish_date) click.clear() if not jh.should_execute_silently(): # print candles table key = f"{config['app']['considering_candles'][0][0]}-{config['app']['considering_candles'][0][1]}" table.key_value(stats.candles(candles[key]['candles']), 'candles', alignments=('left', 'right')) print('\n') # print routes table table.multi_value(stats.routes(router.routes)) print('\n') # print guidance for debugging candles if jh.is_debuggable('trading_candles') or jh.is_debuggable( 'shorter_period_candles'): print( ' Symbol | timestamp | open | close | high | low | volume' ) # run backtest simulation simulator(candles) if not jh.should_execute_silently(): # print trades metrics if store.completed_trades.count > 0: change = [] # calcualte market change for e in router.routes: if e.strategy is None: return first = Candle.select(Candle.close).where( Candle.timestamp == jh.date_to_timestamp(start_date), Candle.exchange == e.exchange, Candle.symbol == e.symbol).first() last = Candle.select(Candle.close).where( Candle.timestamp == jh.date_to_timestamp(finish_date) - 60000, Candle.exchange == e.exchange, Candle.symbol == e.symbol).first() change.append( ((last.close - first.close) / first.close) * 100.0) data = report.portfolio_metrics() data.append( ['Market Change', f"{str(round(np.average(change), 2))}%"]) print('\n') table.key_value(data, 'Metrics', alignments=('left', 'right')) print('\n') # save logs store_logs(json, tradingview, csv) if chart: charts.portfolio_vs_asset_returns() # QuantStats' report if full_reports: price_data = [] # load close candles for Buy and hold and calculate pct_change for index, c in enumerate( config['app']['considering_candles']): exchange, symbol = c[0], c[1] if exchange in config['app'][ 'trading_exchanges'] and symbol in config['app'][ 'trading_symbols']: # fetch from database candles_tuple = Candle.select( Candle.timestamp, Candle.close).where( Candle.timestamp.between( jh.date_to_timestamp(start_date), jh.date_to_timestamp(finish_date) - 60000), Candle.exchange == exchange, Candle.symbol == symbol).order_by( Candle.timestamp.asc()).tuples() candles = np.array(candles_tuple) timestamps = candles[:, 0] price_data.append(candles[:, 1]) price_data = np.transpose(price_data) price_df = pd.DataFrame(price_data, index=pd.to_datetime(timestamps, unit="ms"), dtype=float).resample('D').mean() price_pct_change = price_df.pct_change(1).fillna(0) bh_daily_returns_all_routes = price_pct_change.mean(1) quantstats.quantstats_tearsheet(bh_daily_returns_all_routes) else: print(jh.color('No trades were made.', 'yellow'))
def evolve(self) -> List[Any]: """ the main method, that runs the evolutionary algorithm """ # generate the population if starting if self.started_index == 0: self.generate_initial_population() if len(self.population) < 0.5 * self.population_size: raise ValueError( 'Too many errors: less then half of the planned population size could be generated.' ) # if even our best individual is too weak, then we better not continue if self.population[0]['fitness'] == 0.0001: print( jh.color( 'Cannot continue because no individual with the minimum fitness-score was found. ' 'Your strategy seems to be flawed or maybe it requires modifications. ', 'yellow')) jh.terminate_app() loop_length = int(self.iterations / self.cpu_cores) i = self.started_index with click.progressbar(length=loop_length, label='Evolving...') as progressbar: while i < loop_length: with Manager() as manager: people = manager.list([]) workers = [] def get_baby(people: List) -> None: try: # let's make a baby together LOL baby = self.make_love() # let's mutate baby's genes, who knows, maybe we create a x-man or something baby = self.mutate(baby) people.append(baby) except Exception as e: proc = os.getpid() logger.error(f'process failed - ID: {str(proc)}') logger.error("".join( traceback.TracebackException.from_exception( e).format())) raise e try: for _ in range(self.cpu_cores): w = Process(target=get_baby, args=[people]) w.start() workers.append(w) for w in workers: w.join() if w.exitcode > 0: logger.error( f'a process exited with exitcode: {str(w.exitcode)}' ) except KeyboardInterrupt: print(jh.color('Terminating session...', 'red')) # terminate all workers for w in workers: w.terminate() # shutdown the manager process manually since garbage collection cannot won't get to do it for us manager.shutdown() # now we can terminate the main session safely jh.terminate_app() except: raise # update dashboard click.clear() progressbar.update(1) print('\n') table_items = [ [ 'Started At', jh.timestamp_to_arrow(self.start_time).humanize() ], [ 'Index/Total', f'{(i + 1) * self.cpu_cores}/{self.iterations}' ], [ 'errors/info', f'{len(store.logs.errors)}/{len(store.logs.info)}' ], [ 'Route', f'{router.routes[0].exchange}, {router.routes[0].symbol}, {router.routes[0].timeframe}, {router.routes[0].strategy_name}' ] ] if jh.is_debugging(): table_items.insert(3, [ 'Population Size, Solution Length', f'{self.population_size}, {self.solution_len}' ]) table.key_value(table_items, 'info', alignments=('left', 'right')) # errors if jh.is_debugging() and len(report.errors()): print('\n') table.key_value(report.errors(), 'Error Logs') print('\n') print('Best DNA candidates:') print('\n') # print fittest individuals if jh.is_debugging(): fittest_list = [ [ 'Rank', 'DNA', 'Fitness', 'Training log || Testing log' ], ] else: fittest_list = [ ['Rank', 'DNA', 'Training log || Testing log'], ] if self.population_size > 50: number_of_ind_to_show = 15 elif self.population_size > 20: number_of_ind_to_show = 10 elif self.population_size > 9: number_of_ind_to_show = 9 else: raise ValueError( 'self.population_size cannot be less than 10') for j in range(number_of_ind_to_show): log = f"win-rate: {self.population[j]['training_log']['win-rate']}%, total: {self.population[j]['training_log']['total']}, PNL: {self.population[j]['training_log']['PNL']}% || win-rate: {self.population[j]['testing_log']['win-rate']}%, total: {self.population[j]['testing_log']['total']}, PNL: {self.population[j]['testing_log']['PNL']}%" if self.population[j]['testing_log'][ 'PNL'] is not None and 
self.population[j][ 'training_log'][ 'PNL'] > 0 and self.population[j][ 'testing_log']['PNL'] > 0: log = jh.style(log, 'bold') if jh.is_debugging(): fittest_list.append([ j + 1, self.population[j]['dna'], self.population[j]['fitness'], log ], ) else: fittest_list.append( [j + 1, self.population[j]['dna'], log], ) if jh.is_debugging(): table.multi_value(fittest_list, with_headers=True, alignments=('left', 'left', 'right', 'left')) else: table.multi_value(fittest_list, with_headers=True, alignments=('left', 'left', 'left')) # one person has to die and be replaced with the newborn baby for baby in people: random_index = randint( 1, len(self.population) - 1) # never kill our best perforemr try: self.population[random_index] = baby except IndexError: print('=============') print( f'self.population_size: {self.population_size}' ) print( f'self.population length: {len(self.population)}' ) jh.terminate_app() self.population = list( sorted(self.population, key=lambda x: x['fitness'], reverse=True)) # reaching the fitness goal could also end the process if baby['fitness'] >= self.fitness_goal: progressbar.update(self.iterations - i) print('\n') print(f'fitness goal reached after iteration {i}') return baby # save progress after every n iterations if i != 0 and int(i * self.cpu_cores) % 50 == 0: self.save_progress(i) # store a take_snapshot of the fittest individuals of the population if i != 0 and i % int(100 / self.cpu_cores) == 0: self.take_snapshot(i * self.cpu_cores) i += 1 print('\n\n') print(f'Finished {self.iterations} iterations.') return self.population
def confirm_hosts_facts(oo_cfg, callback_facts):
    hosts = oo_cfg.deployment.hosts
    click.clear()
    message = """
The following is a list of the facts gathered from the provided hosts. The
hostname for a system inside the cluster is often different from the hostname
that is resolvable from command-line or web clients, therefore these settings
cannot be validated automatically.

For some cloud providers, the installer is able to gather metadata exposed in
the instance, so reasonable defaults will be provided.

Please confirm that they are correct before moving forward.

"""
    notes = """
Format:

connect_to,IP,public IP,hostname,public hostname

Notes:
 * The installation host is the hostname from the installer's perspective.
 * The IP of the host should be the internal IP of the instance.
 * The public IP should be the externally accessible IP associated with the instance
 * The hostname should resolve to the internal IP from the instances
   themselves.
 * The public hostname should resolve to the external IP from hosts outside of
   the cloud.
"""

    # For testing purposes we need to click.echo only once, so build up
    # the message:
    output = message

    default_facts_lines = []
    default_facts = {}
    for host in hosts:
        if host.preconfigured:
            continue
        try:
            default_facts[host.connect_to] = {}
            host.ip = callback_facts[host.connect_to]["common"]["ip"]
            host.public_ip = callback_facts[host.connect_to]["common"]["public_ip"]
            host.hostname = callback_facts[host.connect_to]["common"]["hostname"]
            host.public_hostname = callback_facts[host.connect_to]["common"]["public_hostname"]
        except KeyError:
            click.echo("Problem fetching facts from {}".format(host.connect_to))
            continue

        default_facts_lines.append(",".join([host.connect_to,
                                             host.ip,
                                             host.public_ip,
                                             host.hostname,
                                             host.public_hostname]))
        output = "%s\n%s" % (output, ",".join([host.connect_to,
                                               host.ip,
                                               host.public_ip,
                                               host.hostname,
                                               host.public_hostname]))

    output = "%s\n%s" % (output, notes)
    click.echo(output)
    facts_confirmed = click.confirm("Do the above facts look correct?")
    if not facts_confirmed:
        message = """
Edit %s with the desired values and run `atomic-openshift-installer --unattended install`
to restart the install.
""" % oo_cfg.config_path
        click.echo(message)
        # Make sure we actually write out the config file.
        oo_cfg.save_to_disk()
        sys.exit(0)
    return default_facts
def keep_going():
    click.clear()
def generate_signal(start_date: str, finish_date: str, candles=None, chart=False,
                    tradingview=False, csv=False, json=False):
    # clear the screen
    if not jh.should_execute_silently():
        click.clear()

    # validate routes
    validate_routes(router)

    # initiate candle store
    store.candles.init_storage(5000)

    # load historical candles
    if candles is None:
        print('loading candles...')
        candles = load_candles(start_date, finish_date)
        click.clear()

    if not jh.should_execute_silently():
        # print candles table
        key = '{}-{}'.format(config['app']['considering_candles'][0][0],
                             config['app']['considering_candles'][0][1])
        table.key_value(stats.candles(candles[key]['candles']), 'candles',
                        alignments=('left', 'right'))
        print('\n')

        # print routes table
        table.multi_value(stats.routes(router.routes))
        print('\n')

        # print guidance for debugging candles
        if jh.is_debuggable('trading_candles') or jh.is_debuggable('shorter_period_candles'):
            print(' Symbol | timestamp | open | close | high | low | volume')

    # run backtest simulation
    signal_simulator(candles)

    if not jh.should_execute_silently():
        # print trades statistics
        if store.completed_trades.count > 0:
            print('\n')
            table.key_value(report.portfolio_metrics(), 'Metrics',
                            alignments=('left', 'right'))
            print('\n')

            # save logs
            store_logs(json, tradingview, csv)

            if chart:
                charts.portfolio_vs_asset_returns()

            # # clone the trades so that the original is not mutated
            # completed_trades = copy.deepcopy(store.completed_trades.trades)
            #
            # for trade in completed_trades:
            #     print(trade.to_dict())
            #
            # # filter trades which were generated for current day
            # completed_trades = filter(lambda x: x.opened_at == jh.get_current_time_in_epoch(), completed_trades)
            #
            # # TODO: add a slack notification here instead of print to notify the user
            # print("Trades to execute today: ")
            # for trade in completed_trades:
            #     print(trade.to_dict())
        else:
            print(jh.color('No trades were completed/closed.', 'yellow'))

    open_positions = store.positions.get_open_positions()
    open_orders = store.orders.get_all_active_orders()

    current_report = jh.generate_signals_report(open_positions, open_orders)
    return current_report
def felix(click_config, action, teacher_uri, enode, min_stake, network, host,
          dry_run, port, discovery_port, provider_uri, geth, config_root,
          checksum_address, poa, config_file, db_filepath, no_registry,
          registry_filepath, dev, force):

    # Intro
    click.clear()
    if not click_config.quiet:
        click.secho(FELIX_BANNER.format(checksum_address or ''))

    ETH_NODE = NO_BLOCKCHAIN_CONNECTION
    if geth:
        ETH_NODE = actions.get_provider_process(dev=dev)
        provider_uri = ETH_NODE.provider_uri

    if action == "init":
        """Create a brand-new Felix"""

        if not config_root:                         # Flag
            config_root = DEFAULT_CONFIG_ROOT       # Envvar or init-only default

        # Acquire Keyring Password
        new_password = click_config.get_password(confirm=True)

        try:
            new_felix_config = FelixConfiguration.generate(
                password=new_password,
                config_root=config_root,
                rest_host=host,
                rest_port=discovery_port,
                db_filepath=db_filepath,
                domains={network} if network else None,
                checksum_address=checksum_address,
                download_registry=not no_registry,
                registry_filepath=registry_filepath,
                provider_uri=provider_uri,
                provider_process=ETH_NODE,
                poa=poa)
        except Exception as e:
            if click_config.debug:
                raise
            else:
                click.secho(str(e), fg='red', bold=True)
                raise click.Abort

        # Paint Help
        painting.paint_new_installation_help(new_configuration=new_felix_config)
        return  # <-- do not remove (conditional flow control)

    # Domains -> bytes | or default
    domains = [network] if network else None

    # Load Felix from Configuration File with overrides
    try:
        felix_config = FelixConfiguration.from_configuration_file(
            filepath=config_file,
            domains=domains,
            registry_filepath=registry_filepath,
            provider_process=ETH_NODE,
            provider_uri=provider_uri,
            rest_host=host,
            rest_port=port,
            db_filepath=db_filepath,
            poa=poa)
    except FileNotFoundError:
        click.secho(
            f"No Felix configuration file found at {config_file}. "
            f"Check the filepath or run 'nucypher felix init' to create a new system configuration.")
        raise click.Abort

    try:
        # Connect to Blockchain
        felix_config.connect_to_blockchain()

        # Authenticate
        password = click_config.get_password(confirm=False)
        click_config.unlock_keyring(character_configuration=felix_config,
                                    password=password)

        # Produce Teacher Ursulas
        teacher_nodes = actions.load_seednodes(
            teacher_uris=[teacher_uri] if teacher_uri else None,
            min_stake=min_stake,
            federated_only=felix_config.federated_only,
            network_domains=felix_config.domains,
            network_middleware=click_config.middleware)

        # Produce Felix
        FELIX = felix_config.produce(domains=network, known_nodes=teacher_nodes)
        FELIX.make_web_app()  # attach web application, but don't start service

    except Exception as e:
        if click_config.debug:
            raise
        else:
            click.secho(str(e), fg='red', bold=True)
            raise click.Abort

    if action == "createdb":  # Initialize Database
        if os.path.isfile(FELIX.db_filepath):
            if not force:
                click.confirm("Overwrite existing database?", abort=True)
            os.remove(FELIX.db_filepath)
            click.secho(f"Destroyed existing database {FELIX.db_filepath}")

        FELIX.create_tables()
        click.secho(f"\nCreated new database at {FELIX.db_filepath}", fg='green')

    elif action == 'view':
        token_balance = FELIX.token_balance
        eth_balance = FELIX.eth_balance
        click.secho(f"""
Address .... {FELIX.checksum_address}
NU ......... {str(token_balance)}
ETH ........ {str(eth_balance)}
""")

    elif action == "accounts":
        accounts = FELIX.blockchain.client.accounts
        for account in accounts:
            click.secho(account)

    elif action == "destroy":
        """Delete all configuration files from the disk"""
        actions.destroy_configuration(character_config=felix_config, force=force)

    elif action == 'run':  # Start web services
        try:
            click.secho("Waiting for blockchain sync...", fg='yellow')
            FELIX.start(host=host,
                        port=port,
                        web_services=not dry_run,
                        distribution=True,
                        crash_on_error=click_config.debug)
        finally:
            FELIX.blockchain.disconnect()

    else:
        raise click.BadArgumentUsage("No such argument {}".format(action))
def review(self, i=None, number_of_notes=None, remove_actions=None): """Interactive review of the note This method is used by the review command. if the arguments "i" and "number_of_notes" are supplied, then they are displayed to show review progress. The "remove_actions" argument can be used to remove a default action from the action menu. """ actions = { 'c': 'Continue', 'e': 'Edit', 'd': 'Delete', 'f': 'Show images', 'm': 'Toggle markdown', '*': 'Toggle marked', 'z': 'Toggle suspend', 'a': 'Add new', 's': 'Save and stop', 'x': 'Abort', } if remove_actions: actions = {key: val for key, val in actions.items() if val not in remove_actions} refresh = True while True: if refresh: click.clear() if i is None: click.secho('Reviewing note\n', fg='white') elif number_of_notes is None: click.secho(f'Reviewing note {i+1}\n', fg='white') else: click.secho(f'Reviewing note {i+1} of {number_of_notes}\n', fg='white') column = 0 for x, y in actions.items(): menu = click.style(x, fg='blue') + ': ' + y if column < 3: click.echo(f'{menu:28s}', nl=False) else: click.echo(menu) column = (column + 1) % 4 width = os.get_terminal_size()[0] click.echo('\n' + '-'*width + '\n') self.print() else: refresh = True choice = readchar.readchar() action = actions.get(choice) if action == 'Continue': return True if action == 'Edit': self.edit() continue if action == 'Delete': if click.confirm('Are you sure you want to delete the note?'): self.delete() return True if action == 'Show images': self.show_images() refresh = False continue if action == 'Toggle markdown': self.toggle_markdown() continue if action == 'Toggle marked': self.toggle_marked() continue if action == 'Toggle suspend': self.toggle_suspend() continue if action == 'Add new': click.echo('-'*width + '\n') notes = self.a.add_notes_with_editor( tags=self.get_tag_string(), model_name=self.model_name, template=self) number_of_notes = len(notes) click.echo(f'Added {number_of_notes} notes') click.confirm('Press any key to continue.', prompt_suffix='', show_default=False) continue if action == 'Save and stop': click.echo('Stopped') return False if action == 'Abort': if self.a.modified: if not click.confirm( 'Abort: Changes will be lost. Continue [y/n]?', show_default=False): continue self.a.modified = False raise click.Abort()
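# A minimal, self-contained sketch of the pattern used by review() above: keep
# the single-key actions in a dict, clear and redraw the screen on each pass,
# read one keypress, and dispatch on it. Names here (ACTIONS, demo_review) are
# illustrative only, and click.getchar() stands in for the readchar dependency
# used above.
import click

ACTIONS = {
    'c': 'Continue',
    'e': 'Edit',
    'x': 'Abort',
}


def demo_review():
    while True:
        click.clear()
        click.secho('Reviewing note\n', fg='white')
        for key, label in ACTIONS.items():
            click.echo(click.style(key, fg='blue') + ': ' + label)
        choice = click.getchar()
        action = ACTIONS.get(choice)
        if action == 'Continue':
            return True
        if action == 'Edit':
            click.echo('(editing would happen here)')
            click.pause()
        elif action == 'Abort':
            raise click.Abort()


if __name__ == '__main__':
    demo_review()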
def cmd_clear():
    """Clear the terminal screen."""
    click.clear()
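# The helper above is deliberately tiny; in a Click-based CLI it would usually
# be registered as a command so users can clear the screen from the tool
# itself. A hypothetical sketch of that wiring (not the original project's):
import click


@click.command(name='clear')
def clear_command():
    """Clear the terminal screen."""
    click.clear()


if __name__ == '__main__':
    clear_command()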
def menu(): checkconfig() menu = 'main' while True: if menu == 'main': click.clear() printbanner() click.echo('\nMain menu:') click.echo(' 1. Send a file') click.echo(' 2. Receive a file') click.echo(' 3. Instructions') click.echo(' 4. Settings') click.echo(' 5. About') click.echo(' q. Quit') click.echo('\n Version 0.0.1') click.echo(' By Tommy Riska') char = click.getchar() # Recieve a file if char == '1': click.clear() printbanner() sender() # Send a file elif char == '2': click.clear() printbanner() reciever() # Instructions elif char == '3': click.clear() click.echo_via_pager(""" _________________________________________________________________________ | _______ _____ | | | ____| | __ | | | | |__ ___| |__) |___ | | | __| |_ / ___/_ / | | | |____ / /| | / / | | |______/___|_| /___| | |_______________________________________________________________________| INSTRUCTIONS: Send a file: 1. Select menu option #1. 2. Select port, default is 1337. 3. Select file to send. 4. Wait for incoming connection. 5. Wait for file transfer to be done then click 'M' to return to main menu. Receive a file: 1. Select menu option #2. 2. Select IP to connect to. This should be senders public IP or local IP in the case of a LAN transfer. 3. Select a port to connect to. Has to be specifically the port the sender selected. 4. Wait for download to be finished then click 'M' to return to main menu. Important! All files will currently be saved in users home directory. More configurations will come later. Press 'Q' to get back to main menu. """) menu == 'main' # Configuration elif char == '4': click.clear() printbanner() with open('config.json', 'r') as f: config = json.load(f) click.echo(json.dumps(config)) click.echo('\nSettings are not implemented yet!') click.echo('\nBack: b') char = click.getchar() if char == 'b': menu == 'main' # About section elif char == '5': click.clear() printbanner() click.echo('ABOUT EzPz:\n\n') click.echo('Made by Tommy Riska') click.echo('Version 0.0.1') click.echo('\nBack: b') char = click.getchar() if char == 'b': menu = 'main' else: click.echo('Invalid input') # Quit elif char == 'q': click.clear() return
async def handleMsg(self, messages: List[Dict], position: str) -> ActionFlow: message = messages[0] data = message node = data['node'] if node not in self.nodes and self.shouldProcessNode(node): # nodeEntry = [data['node'][:8]] nodeEntry = [node] nodeEntryHeaders = ['Nodes'] cobraData = data['data']['cobra'] for key in sorted(cobraData.keys()): if self.system: skip = False for keyName in ('published', 'subscribed', 'subscriptions'): if keyName in key: skip = True if skip: continue s = sum(cobraData[key].values()) self.metrics[key] += s s = self.humanReadableSize(key, s) nodeEntry.append(s) nodeEntryHeaders.append(key) self.updateRoleMetrics(cobraData) channelData = data['data'].get('channel_data') if channelData is not None: self.updateChannelMetrics(channelData) # System stats for metric in data['data']['system'].keys(): val = data['data']['system'][metric] if metric == 'connections': self.metrics[metric] += val nodeEntryHeaders.append(metric) val = self.humanReadableSize(metric, val) nodeEntry.append(val) self.nodeEntries.append(nodeEntry) self.nodes.add(node) if self.nodeEntriesHeader is None: self.nodeEntriesHeader = nodeEntryHeaders if self.throttle.exceedRate(): return ActionFlow.CONTINUE click.clear() # print(yaml.dump(data)) self.metrics = { key: self.humanReadableSize(key, val) for key, val in self.metrics.items() } if self.showSumarry: print(writeJson(self.metrics)) print() # Print a table with all nodes nodeEntries = [self.nodeEntriesHeader] self.nodeEntries.sort() nodeEntries.extend(self.nodeEntries) # Transpose our array and print the nodes horizontally if 0 < len(nodeEntries) < 8: nodeEntries = transpose(nodeEntries) if self.showNodes: print(tabulate.tabulate(nodeEntries, tablefmt="simple", headers="firstrow")) if self.showRoles: self.displayRoleMetrics() if self.showChannels: self.displayChannelMetrics() self.resetMetrics() return ActionFlow.STOP if self.once else ActionFlow.CONTINUE
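# handleMsg() above skips the clear-and-redraw whenever its throttle reports
# that updates are arriving too quickly. A simplified sketch of that idea,
# assuming a plain time-based throttle (the real Throttle class used above is
# not shown in this snippet):
import time
import click


class SimpleThrottle:
    """exceed_rate() returns True when called again within min_interval seconds."""

    def __init__(self, min_interval: float = 1.0):
        self.min_interval = min_interval
        self._last = 0.0

    def exceed_rate(self) -> bool:
        now = time.monotonic()
        if now - self._last < self.min_interval:
            return True
        self._last = now
        return False


def redraw(metrics: dict, throttle: SimpleThrottle) -> None:
    if throttle.exceed_rate():
        return  # too soon: keep the previous frame on screen
    click.clear()
    for key, value in sorted(metrics.items()):
        click.echo(f'{key}: {value}')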
def hline(x, y, c, s, d):
    # Draw the line segment on the canvas, then clear the terminal, reprint
    # the whole canvas and pause briefly so the drawing appears animated.
    for i in range(s, s + 1):
        h.line(x, y, c, i, d)
        click.clear()
        print(h.show())
        time.sleep(0.02)
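# hline() above follows the usual terminal-animation loop: clear the screen,
# print the current frame, sleep briefly. The same loop in isolation, with a
# plain list of strings standing in for the h canvas object used above:
import time
import click


def animate(frames, delay: float = 0.02) -> None:
    for frame in frames:
        click.clear()
        click.echo(frame)
        time.sleep(delay)


if __name__ == '__main__':
    animate(['-' * n for n in range(1, 40)])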
def evolve(self): """ the main method, that runs the evolutionary algorithm """ # generate the population if starting if self.started_index == 0: self.generate_initial_population() cores_num = cpu_count() loop_length = int(self.iterations / cores_num) i = self.started_index with click.progressbar(length=loop_length, label='Evolving...') as progressbar: while i < loop_length: with Manager() as manager: people = manager.list([]) workers = [] def get_baby(people): """ :param people: """ # let's make a baby together LOL baby = self.make_love() # let's mutate baby's genes, who knows, maybe we create a x-man or something baby = self.mutate(baby) people.append(baby) for _ in range(cores_num): w = Process(target=get_baby, args=[people]) w.start() workers.append(w) for w in workers: w.join() # update dashboard click.clear() progressbar.update(1) print('\n') table.key_value( [[ 'started at', jh.get_arrow(self.start_time).humanize() ], [ 'index/total', '{}/{}'.format( (i + 1) * cores_num, self.iterations) ], [ 'population_size, solution_len', '{}, {}'.format( self.population_size, self.solution_len) ], [ 'route', '{}, {}, {}, {}'.format( router.routes[0].exchange, router.routes[0].symbol, router.routes[0].timeframe, router.routes[0].strategy_name) ]], 'info', alignments=('left', 'right')) print('\n') # print fittest individuals fittest_list = [ ['rank', 'DNA', 'fitness', 'training|testing logs'], ] if self.population_size > 50: number_of_ind_to_show = 25 elif self.population_size > 20: number_of_ind_to_show = 20 elif self.population_size > 9: number_of_ind_to_show = 9 else: raise ValueError( 'self.population_size cannot be less than 10') for j in range(number_of_ind_to_show): fittest_list.append([ j + 1, self.population[j]['dna'], self.population[j]['fitness'], self.population[j]['log'] ], ) table.multi_value(fittest_list, with_headers=True, alignments=('left', 'left', 'right', 'left')) # one person has to die and be replaced with the newborn baby for baby in people: random_index = randint(0, len(self.population) - 1) try: self.population[random_index] = baby except IndexError: print('=============') print('self.population_size: {}'.format( self.population_size)) print('self.population length: {}'.format( len(self.population))) jh.terminate_app() self.population = list( sorted(self.population, key=lambda x: x['fitness'], reverse=True)) # reaching the fitness goal could also end the process if baby['fitness'] >= self.fitness_goal: progressbar.update(self.iterations - i) print('\n') print('fitness goal reached after iteration {}'. format(i)) return baby # save progress after every n iterations if i != 0 and int(i * cores_num) % 50 == 0: self.save_progress(i) # store a take_snapshot of the fittest individuals of the population if i != 0 and i % int(100 / cores_num) == 0: self.take_snapshot(i * cores_num) i += 1 print('\n\n') print('Finished {} iterations.'.format(self.iterations)) return self.population
def clear(self):
    # tidy up the screen
    click.clear()
def run( commands, shell=None, prompt_template="default", speed=1, quiet=False, test_mode=False, commentecho=False, ): """Main function for "magic-running" a list of commands.""" if not quiet: secho("We'll do it live!", fg="red", bold=True) secho( "STARTING SESSION: Press Ctrl-C at any time to exit.", fg="yellow", bold=True, ) click.pause() click.clear() state = SessionState( shell=shell, prompt_template=prompt_template, speed=speed, test_mode=test_mode, commentecho=commentecho, ) i = 0 while i < len(commands): command = commands[i].strip() i += 1 if not command: continue is_comment = command.startswith("#") if not is_comment: command_as_list = shlex.split(command) else: command_as_list = None shell_match = SHELL_RE.match(command) if is_comment: # Parse comment magic match = OPTION_RE.match(command) if match: option, arg = match.group("option"), match.group("arg") func = OPTION_MAP[option] func(state, arg) elif state.commentecho(): comment = command.lstrip("#") secho(comment, fg="yellow", bold=True) continue # Handle 'export' and 'alias' commands by storing them in SessionState elif command_as_list and command_as_list[0] in ["alias", "export"]: magictype(command, prompt_template=state["prompt_template"], speed=state["speed"]) # Store the raw commands instead of using add_envvar and add_alias # to avoid having to parse the command ourselves state.add_command(command) # Handle ```python and ```ipython by running "player" consoles elif shell_match: shell_name = shell_match.groups()[0].strip() py_commands = [] more = True while more: # slurp up all the python code try: py_command = commands[i].rstrip() except IndexError: raise SessionError("Unmatched {} code block in " "session file.".format(shell_name)) i += 1 if py_command.startswith("```"): i += 1 more = False else: py_commands.append(py_command) # Run the player console magictype( shell_name, prompt_template=state["prompt_template"], speed=state["speed"], ) if shell_name == "ipython": from doitlive.ipython import start_ipython_player # dedent all the commands to account for IPython's autoindentation ipy_commands = [textwrap.dedent(cmd) for cmd in py_commands] start_ipython_player(ipy_commands, speed=state["speed"]) else: start_python_player(py_commands, speed=state["speed"]) else: # goto_stealthmode determines when to switch to stealthmode goto_stealthmode = magicrun(command, **state) # stealthmode allows user to type live commands outside of automated script i -= stealthmode(state, goto_stealthmode) echo_prompt(state["prompt_template"]) wait_for(RETURNS) if not quiet: secho("FINISHED SESSION", fg="yellow", bold=True)
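# run() above recognizes "comment magic": comments that match OPTION_RE are
# parsed into (option, arg) pairs and applied to the session state instead of
# being typed out. A rough sketch of that parsing step; the regex below is an
# assumption for illustration, not doitlive's actual OPTION_RE:
import re

OPTION_RE = re.compile(r'^#\s*doitlive\s+(?P<option>\w+)\s*:\s*(?P<arg>.+)$')


def parse_comment_magic(line):
    """Return (option, arg) for a directive comment, or None for a plain one."""
    match = OPTION_RE.match(line.strip())
    if match:
        return match.group('option'), match.group('arg')
    return None


assert parse_comment_magic('#doitlive speed: 2') == ('speed', '2')
assert parse_comment_magic('# just a comment') is None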
def generate_initial_population(self): """ generates the initial population """ cores_num = cpu_count() loop_length = int(self.population_size / cores_num) with click.progressbar( length=loop_length, label='Generating initial population...') as progressbar: for i in range(loop_length): people = [] with Manager() as manager: dna_bucket = manager.list([]) workers = [] def get_fitness(dna, dna_bucket): """ :param dna: :param dna_bucket: """ fitness_score, fitness_log = self.fitness(dna) dna_bucket.append((dna, fitness_score, fitness_log)) for _ in range(cores_num): dna = ''.join( choices(self.charset, k=self.solution_len)) w = Process(target=get_fitness, args=(dna, dna_bucket)) w.start() workers.append(w) # join workers for w in workers: w.join() for d in dna_bucket: people.append({ 'dna': d[0], 'fitness': d[1], 'log': d[2] }) # update dashboard click.clear() progressbar.update(1) print('\n') table.key_value([ ['started at', jh.get_arrow(self.start_time).humanize()], [ 'index/total', '{}/{}'.format(len(self.population), self.population_size) ], ['-', '-'], ['population_size', self.population_size], ['iterations', self.iterations], ['solution_len', self.solution_len], [ 'route', '{}, {}, {}, {}'.format( router.routes[0].exchange, router.routes[0].symbol, router.routes[0].timeframe, router.routes[0].strategy_name) ], ['-', '-'], ['DNA', people[0]['dna']], ['fitness', round(people[0]['fitness'], 6)], ['training|testing logs', people[0]['log']], ], 'baby', alignments=('left', 'right')) for p in people: self.population.append(p) # sort the population self.population = list( sorted(self.population, key=lambda x: x['fitness'], reverse=True))
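# Both evolutionary routines above combine click.progressbar with click.clear:
# every iteration wipes the screen, advances the bar, and reprints a status
# table. A stripped-down sketch of that dashboard loop; the "work" and the
# stats shown here are placeholders:
import time
import click


def run_dashboard(iterations: int = 10) -> None:
    best = 0.0
    with click.progressbar(length=iterations, label='Evolving...') as progressbar:
        for i in range(iterations):
            best = max(best, (i + 1) / iterations)  # stand-in for real work
            time.sleep(0.1)
            click.clear()
            progressbar.update(1)
            click.echo('\n')
            click.echo(f'iteration : {i + 1}/{iterations}')
            click.echo(f'best      : {best:.4f}')


if __name__ == '__main__':
    run_dashboard()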
    'ls': [_ls, 'List all files in current working directory'],
    'cp': [_cp, 'Copy a file or a directory to a target location'],
    'mv': [_mv, 'Move a file from one place to another']
}


def input_checker():
    global ui
    long_ui = ui.split(' && ')
    for one_ui in long_ui:
        ui = one_ui
        x = ui.split(' ')[0]
        if x in commands.keys():
            commands[x][0]()
        else:
            if len(ui) == 0:
                print(random.choice(best_quotes_from_anonymous))
            else:
                print('Error: Unknown Command')


if __name__ == "__main__":
    # The main process
    try:
        while True:
            click.clear()  # Clear the console view
            print(skull)  # Prints the cool title screen thing
            input_checker()
            ui = input('\n\n >DEDSEC:/ ')
    except KeyboardInterrupt:
        os._exit(0)
def mainfunc(username, password, chatroom, servaddr): try: click.clear() print_formatted_text("\n") print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <b><seagreen>Starting Sanctuary ZERO v18102020 up...</seagreen></b>" )) print_formatted_text( HTML("[" + obtntime() + "] " + "SNCTRYZERO > <seagreen>Attempted connection to '" + servaddr + "' at " + str(time.ctime()) + "</seagreen>")) if username.strip() != "": if chatroom is None: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <green>A new chatroom was generated</green>" )) chatroom = randgene() else: if chekroom(chatroom) is True: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <green>A valid chatroom identity was entered</green>" )) elif not chatroom.isupper(): chatroom = chatroom.upper() if chekroom(chatroom): print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <green>A valid chatroom identity was entered</green>" )) else: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>An invalid chatroom identity was entered</red>" )) sys.exit() else: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>An invalid chatroom identity was entered</red>" )) sys.exit() if password is None: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <green>A new password was generated</green>" )) password = Fernet.generate_key().decode("utf8") else: if chekpass(password) is True: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <green>A valid chatroom password was entered</green>" )) else: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>An invalid chatroom password was entered</red>" )) sys.exit() else: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>An invalid username was entered</red>")) sys.exit() print_formatted_text( HTML("[" + obtntime() + "] " + "SNCTRYZERO > <seagreen><b>Identity</b> " + chatroom + " > <b>Password</b> " + password + "</seagreen>")) print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <seagreen>Share the chatroom identity, password and server address to invite members</seagreen>" )) asyncio.get_event_loop().run_until_complete( hello(servaddr, username, chatroom, password)) except KeyboardInterrupt as EXPT: print_formatted_text( HTML("[" + obtntime() + "] " + "SNCTRYZERO > <red>Leaving SNCTRYZERO...</red>")) sys.exit() except OSError as EXPT: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>A connection to the server could not be established</red>" )) sys.exit() except websockets.exceptions.ConnectionClosedError as EXPT: print_formatted_text( HTML( "[" + obtntime() + "] " + "SNCTRYZERO > <red>A connection to the server was lost</red>")) sys.exit()
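# Every status line in mainfunc() above is built the same way: a timestamp
# prefix, the "SNCTRYZERO >" label, and an HTML-colored message printed via
# prompt_toolkit. A small helper capturing that pattern; status() and its
# color argument are illustrative additions, and time.strftime stands in for
# the obtntime() helper used above:
import time

from prompt_toolkit import HTML, print_formatted_text


def status(message: str, color: str = 'seagreen') -> None:
    """Print a timestamped, colored status line in the style used above."""
    stamp = time.strftime('%H:%M:%S')
    print_formatted_text(
        HTML('[' + stamp + '] SNCTRYZERO > <' + color + '>' + message +
             '</' + color + '>'))


if __name__ == '__main__':
    status('Starting up...')
    status('A connection to the server could not be established', color='red')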