def standings(self, league_table, league):
    """ Prints the league standings in a pretty way """
    # header widths aligned with the row format below so columns line up
    click.secho("%-7s %-33s %-12s %-14s %s" %
                ("POS", "CLUB", "PLAYED", "GOAL DIFF", "POINTS"))
    # Define the upper and lower bounds for Champions League,
    # Europa League and Relegation places.
    # This is so we can highlight them appropriately.
    cl_upper, cl_lower = LEAGUE_PROPERTIES[league]['cl']
    el_upper, el_lower = LEAGUE_PROPERTIES[league]['el']
    rl_upper, rl_lower = LEAGUE_PROPERTIES[league]['rl']
    for team in league_table["standing"]:
        if team["goalDifference"] >= 0:
            team["goalDifference"] = ' ' + str(team["goalDifference"])
        team_str = (u"{position:<7} {teamName:<33} {playedGames:<12}"
                    u" {goalDifference:<14} {points}").format(**team)
        if cl_upper <= team["position"] <= cl_lower:
            click.secho(team_str, bold=True, fg=self.colors.CL_POSITION)
        elif el_upper <= team["position"] <= el_lower:
            click.secho(team_str, fg=self.colors.EL_POSITION)
        elif rl_upper <= team["position"] <= rl_lower:
            click.secho(team_str, fg=self.colors.RL_POSITION)
        else:
            click.secho(team_str, fg=self.colors.POSITION)
def users():
    """List all of the team's users and their status"""
    try:
        response = slack_client.users.list(presence=True)
    except Exception as e:
        click.echo(str(e))
        return  # without a response there is nothing to list
    if response.successful:
        users = response.body['members']
        # Collect array of arrays that contain user data in column order
        table_data = []
        for user in users:
            if not user['deleted']:
                user_data = [user['name'],
                             user.get('real_name', None),
                             user.get('presence', 'bot'),
                             user['profile'].get('email', None)]
                table_data.append(user_data)
        table_data.sort(key=operator.itemgetter(2))
        table_headers = [click.style('User', fg='yellow'),
                         click.style('Name', fg='yellow'),
                         click.style('Presence', fg='yellow'),
                         click.style('Email', fg='yellow')]
        click.secho(tabulate.tabulate(table_data, table_headers,
                                      tablefmt="fancy_grid"))
    else:
        click.secho('Slack API request was unsuccessful.', fg='red')
def nice_echo(msg):
    if msg.startswith('+++'):
        click.secho(msg, fg='green')
    elif msg.startswith('***'):
        click.secho(msg, fg='red', err=True)
    else:
        click.echo(msg)
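# A minimal usage sketch for nice_echo() above; the message strings are
# invented for illustration.
nice_echo('+++ deploy finished')      # '+++' prefix -> green on stdout
nice_echo('*** deploy failed')        # '***' prefix -> red on stderr
nice_echo('syncing 3 files')          # anything else -> plain echo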
def generate_user_geocodes(self, csv_path, cache_path):
    """Generates geocodes for the user's provided location.

    :type csv_path: str
    :param csv_path: The user geocodes CSV path to update.

    :type cache_path: str
    :param cache_path: The user geocodes cache path to update.
    """
    count = 0
    for user_id, user in self.cached_users.items():
        if count >= self.CFG_MAX_GEOCODES:
            break
        if user_id in self.user_geocodes_map:
            continue
        if user.location is not None:
            count += 1
            geocode = geocoder.google(user.location)
            click.echo('geocoder status: {0} {1}'.format(
                str(count), geocode.status))
            if geocode.status == 'OVER_QUERY_LIMIT':
                click.secho('Geocode rate limit exceeded!', fg='red')
                break
            self.user_geocodes_map[user_id] = geocode
        else:
            self.user_geocodes_map[user_id] = ''
    self.write_csv_users_geocodes(csv_path)
    self.save_user_geocodes_cache(cache_path)
    self.print_num_users_missing_geocodes()
def export_data(input_path, output_path, output_format):
    import qiime2.util
    import qiime2.sdk
    import distutils.dir_util  # plain `import distutils` does not load dir_util
    result = qiime2.sdk.Result.load(input_path)
    if output_format is None:
        if isinstance(result, qiime2.sdk.Artifact):
            output_format = result.format.__name__
        else:
            output_format = 'Visualization'
        result.export_data(output_path)
    else:
        if isinstance(result, qiime2.sdk.Visualization):
            error = '--output-format cannot be used with visualizations'
            click.secho(error, fg='red', bold=True, err=True)
            click.get_current_context().exit(1)
        else:
            source = result.view(qiime2.sdk.parse_format(output_format))
            if os.path.isfile(str(source)):
                if os.path.isfile(output_path):
                    os.remove(output_path)
                else:
                    # create directory (recursively) if it doesn't exist yet
                    os.makedirs(os.path.dirname(output_path), exist_ok=True)
                qiime2.util.duplicate(str(source), output_path)
            else:
                distutils.dir_util.copy_tree(str(source), output_path)

    output_type = 'file' if os.path.isfile(output_path) else 'directory'
    success = 'Exported %s as %s to %s %s' % (input_path, output_format,
                                              output_type, output_path)
    click.secho(success, fg='green')
def download_images(image_paths):
    headers = {"User-Agent": smugmug.application}
    cookies = {}
    session_id = getattr(smugmug, 'session_id', None)
    # TODO: This apparently allows downloading of private images, but doesn't
    # actually work in testing.
    if session_id:
        cookies["SMSESS"] = session_id
    session = requests.Session()
    session.headers = headers
    session.cookies = requests.utils.cookiejar_from_dict(cookies)
    with click.progressbar(image_paths, label='Downloading images') as paths:
        for image_path, url, checked_md5sum in paths:
            req = session.get(url)
            if checked_md5sum:
                md5sum = hashlib.md5()
                md5sum.update(req.content)
                if md5sum.hexdigest() != checked_md5sum:
                    click.secho(
                        'Checksum for downloaded image %s incorrect; '
                        'skipping image' % image_path, fg='red')
                    continue
            if not req.content:
                click.secho(
                    'Downloaded image %s is empty; skipping' % url, fg='red')
                continue
            with open(image_path, 'wb') as f:
                f.write(req.content)
def cli(force):
    """ Update AerisCloud """
    if not force and config.get('github', 'enabled', default=False) == 'true':
        client = Github().gh
        repo = client.repository('aeriscloud', 'aeriscloud')
        latest_release = next(repo.iter_releases())
        latest_version = latest_release.tag_name[1:]
        if semver.compare(version, latest_version) != -1:
            click.secho('AerisCloud is already up to date!', fg='green')
            sys.exit(0)
        click.echo('A new version of AerisCloud is available: %s (%s)' % (
            click.style(latest_version, fg='green', bold=True),
            click.style(latest_release.name, bold=True)
        ))

    # retrieve install script in a tmpfile
    tmp = tempfile.NamedTemporaryFile()
    r = requests.get('https://raw.githubusercontent.com/' +
                     'AerisCloud/AerisCloud/develop/scripts/install.sh')
    if r.status_code != 200:
        fatal('error: update server returned %d (%s)' % (
            r.status_code, r.reason))
    tmp.write(r.content)
    tmp.flush()

    os.environ['INSTALL_DIR'] = aeriscloud_path
    call(['bash', tmp.name])

    tmp.close()
def check_record_doi(record, update=False):
    """ Checks that the DOI of a record is registered."""
    recid = record.get('_deposit', {}).get('id')
    click.secho('checking DOI for record {}'.format(recid))
    doi_list = [DataCiteProvider.get(d.get('value'))
                for d in record['_pid']
                if d.get('type') == 'DOI']
    for doi in doi_list:
        if _datacite_doi_reference(doi.pid.pid_value) is None:
            if doi.pid.status == PIDStatus.REGISTERED:
                # the doi is not truly registered with datacite
                click.secho('    {}: not registered with datacite'.format(
                    doi.pid.pid_value))
                doi.pid.status = PIDStatus.RESERVED
        click.secho('    {}: {}'.format(doi.pid.pid_value, doi.pid.status))
        if doi.pid.status != PIDStatus.RESERVED:
            continue  # RESERVED but not REGISTERED
        if update:
            recid = record.get('_deposit', {}).get('id')
            url = make_record_url(recid)
            doc = datacite_v31.serialize(doi.pid, record)
            _datacite_register_doi(doi, url, doc)
            db.session.commit()
            click.secho('    registered just now', fg='green', bold=True)
        else:
            click.secho('    not registered', fg='red', bold=True)
def cli(ctx, email, password, group, nick, display_name, force):
    """ Creates a new user account """
    assert isinstance(ctx, Context)

    # Make sure our configuration directory exists
    config_dir = os.path.join(FireflyIRC.CONFIG_DIR, 'config')
    if not os.path.exists(config_dir):
        os.makedirs(config_dir, 0o755)

    # Make sure the user doesn't already exist in our servers configuration
    users_config = FireflyIRC.load_configuration('users')
    users_cfg_path = os.path.join(config_dir, 'users.cfg')
    if email in users_config.sections():
        ctx.log.info('Configuration for %s already exists', email)
        if not force:
            raise click.ClickException('Configuration for {e} already exists'.format(e=email))
        users_config.remove_section(email)

    # Populate users.cfg
    users_config.add_section(email)
    users_config.set(email, 'Password', bcrypt.encrypt(password))
    users_config.set(email, 'Group', group)
    users_config.set(email, 'Nick', nick)
    users_config.set(email, 'DisplayName', display_name)

    # Write to our users configuration file
    with open(users_cfg_path, 'w') as cf:
        users_config.write(cf)

    click.secho('Configuration for user {e} successfully generated'.format(e=email), bold=True)
    click.secho('Users configuration path: {sp}'.format(sp=users_cfg_path), bold=True)
def main(league, time, standings, team, live, use12hour, players,
         output_format, output_file, upcoming, lookup):
    """A CLI for live and past football scores from various football leagues"""
    try:
        if output_format == 'stdout' and output_file:
            raise IncorrectParametersException(
                'Printing output to stdout and '
                'saving to a file are mutually exclusive')
        writer = get_writer(output_format, output_file)

        if live:
            get_live_scores(writer, use12hour)
            return

        if standings:
            if not league:
                raise IncorrectParametersException(
                    'Please specify a league. '
                    'Example --standings --league=EPL')
            get_standings(league, writer)
            return

        if team:
            if lookup:
                map_team_id(team)
                return
            if players:
                get_team_players(team, writer)
                return
            else:
                get_team_scores(team, time, writer, upcoming, use12hour)
                return

        get_league_scores(league, time, writer, upcoming, use12hour)
    except IncorrectParametersException as e:
        click.secho(e.message, fg="red", bold=True)
def __profile__(filename, code, globals_, timer=None,
                pickle_protocol=PICKLE_PROTOCOL, dump_filename=None,
                mono=False):
    frame = sys._getframe()
    profiler = Profiler(timer, top_frame=frame, top_code=code)
    profiler.start()
    try:
        exec_(code, globals_)
    except:
        # don't profile print_exc().
        profiler.stop()
        traceback.print_exc()
    else:
        profiler.stop()
    if PY2:
        # in Python 2, exec's cpu time is duplicated with actual cpu time.
        stat = profiler.stats.get_child(frame.f_code)
        stat.remove_child(exec_.func_code)
    if dump_filename is None:
        viewer, loop = make_viewer(mono)
        viewer.set_stats(profiler.stats, get_title(filename))
        try:
            loop.run()
        except KeyboardInterrupt:
            pass
    else:
        stats = profiler.result()
        with open(dump_filename, 'wb') as f:
            pickle.dump(stats, f, pickle_protocol)
        click.echo('To view statistics:')
        click.echo('  $ python -m profiling view ', nl=False)
        click.secho(dump_filename, underline=True)
def vmmac(obj, uuid):
    '''ovf extract '''
    # pp = pprint.PrettyPrinter(indent=4)
    with omi_channel(obj.getSettings('otec')['host'],
                     obj.getSettings('otec')['user'],
                     obj.getSettings('otec')['password'],
                     443) as channel:
        vm = channel.content.searchIndex.FindByUuid(None, uuid, True)
        if vm:
            try:
                nics = [dev for dev in vm.config.hardware.device
                        if isinstance(dev, vim.vm.device.VirtualEthernetCard)]
                for nic in nics:
                    click.secho("{} ⇒❯ Nic {} on {} ".format(
                        vm.name, nic.macAddress, nic.backing.network.name),
                        fg='green')
                disks = [d for d in vm.config.hardware.device
                         if isinstance(d, vim.vm.device.VirtualDisk) and
                         isinstance(d.backing,
                                    vim.vm.device.VirtualDisk.FlatVer2BackingInfo)]
                # pp.pprint(disks)
                for disk in disks:
                    click.secho("{} ⇒❯ Disk {} on {} ".format(
                        vm.name, disk.deviceInfo.label,
                        disk.backing.fileName), fg='green')
            except vmodl.MethodFault as vmomi_fault:
                click.secho("VMware error: {}".format(vmomi_fault.msg),
                            fg='red')
            except Exception as std_exception:
                click.secho("standard error: {}".format(str(std_exception)),
                            fg='red')
        else:
            click.secho("No matching VM for {}".format(uuid), fg='red')
def createJar(fileName, files, outdir):
    # -v adds verbose jar output; either way the jar is built without
    # a manifest (-M) and the command itself is echoed when verbose
    if verbosity > 1:
        jarCommand = ["jar", "-cvfM", path.join(outdir, fileName),
                      "-C", outdir, files]
        click.secho(" ".join(jarCommand), fg='magenta')
    else:
        jarCommand = ["jar", "-cfM", path.join(outdir, fileName),
                      "-C", outdir, files]
    call(jarCommand)
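# A hedged usage sketch for createJar() above; the jar name, file argument and
# output directory are invented, and `verbosity` is assumed to be a
# module-level setting.
createJar('plugin.jar', '.', 'build/out')  # packs build/out into build/out/plugin.jar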
def console(instance_or_stack_ref, limit, region, w, watch):
    '''Print EC2 instance console output.

    INSTANCE_OR_STACK_REF can be an instance ID, private IP address
    or stack name/version.'''
    if all(x.startswith('i-') for x in instance_or_stack_ref):
        stack_refs = None
        filters = {'instance-id': list(instance_or_stack_ref)}
    elif all(is_ip_address(x) for x in instance_or_stack_ref):
        stack_refs = None
        filters = {'private-ip-address': list(instance_or_stack_ref)}
    else:
        stack_refs = get_stack_refs(instance_or_stack_ref)
        # filter out instances not part of any stack
        filters = {'tag-key': 'aws:cloudformation:stack-name'}

    region = get_region(region)
    check_credentials(region)

    conn = boto.ec2.connect_to_region(region)

    for _ in watching(w, watch):
        for instance in conn.get_only_instances(filters=filters):
            cf_stack_name = instance.tags.get('aws:cloudformation:stack-name')
            if not stack_refs or matches_any(cf_stack_name, stack_refs):
                output = conn.get_console_output(instance.id)
                click.secho('Showing last {} lines of {}..'.format(
                    limit, instance.private_ip_address or instance.id),
                    bold=True)
                if output.output:
                    for line in output.output.decode('utf-8', errors='replace').split('\n')[-limit:]:
                        print_console(line)
def main():
    try:
        if "cygwin" in system().lower():
            raise exception.CygwinEnvDetected()

        # https://urllib3.readthedocs.org
        # /en/latest/security.html#insecureplatformwarning
        try:
            requests.packages.urllib3.disable_warnings()
        except AttributeError:
            raise exception.PlatformioException(
                "Invalid installation of Python `requests` package. See "
                "< https://github.com/platformio/platformio/issues/252 >")

        cli(None, None, None)
    except Exception as e:  # pylint: disable=W0703
        if not isinstance(e, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(e)
            error_str = "Error: "
            if isinstance(e, exception.PlatformioException):
                error_str += str(e)
            else:
                error_str += format_exc()
            click.secho(error_str, fg="red", err=True)
        return 1
    return 0
def cli(ctx, no_retweets, language_filter, verbose):
    """
    \b
     _______  ___      ___
    |       ||   |    |   |
    |_     _||   |    |   |
      |   |  |   |    |   |
      |   |  |   |___ |   |
      |   |  |       ||   |
      |___|  |_______||___|

    Twitter Line Interface
    """
    banner()
    consumer_key, consumer_secret, access_token, access_token_secret, username = readconfig()
    if language_filter:
        if verbose:
            click.secho('[v] Configuring Language Filter', fg='yellow', dim=True)
        language_filter = language_filter.split(',')
    if verbose and no_retweets:
        click.secho('[v] Filtering out re-tweets.', fg='yellow')
    ctx.obj = Twitter(verbose=verbose,
                      no_retweets=no_retweets,
                      language_filter=language_filter,
                      consumer_key=consumer_key,
                      consumer_secret=consumer_secret,
                      access_token=access_token,
                      access_token_secret=access_token_secret)
    ctx.verbose = verbose
def cli(env, identifier):
    """Cancel all virtual guests of the dedicated host immediately.

    Use the 'slcli vs cancel' command to cancel a specific guest.
    """
    dh_mgr = SoftLayer.DedicatedHostManager(env.client)

    host_id = helpers.resolve_id(dh_mgr.resolve_ids, identifier, 'dedicated host')

    if not (env.skip_confirmations or formatting.no_going_back(host_id)):
        raise exceptions.CLIAbort('Aborted')

    table = formatting.Table(['id', 'server name', 'status'])

    result = dh_mgr.cancel_guests(host_id)

    if result:
        for status in result:
            table.add_row([status['id'], status['fqdn'], status['status']])
        env.fout(table)
    else:
        click.secho('There are no guests on dedicated host %s' % host_id,
                    fg='red')
def run(ctx, selenium_processes, api_processes, force_serial, test_type,
        results_path, tests):
    """Runs all QA tests in parallel or serial.

    \b
    `qtr run` will run all tests in the test config file.
    [TESTS] arguments should be the file name of the test you want to run,
    without the .py extension.
    [TESTS] arguments work with Unix shell-style wildcards i.e. 'testc14*'.
    Each wildcard style argument must be enclosed in single quotes"""
    # Add command-line parameters to context
    ctx.processes['SELENIUM'] = selenium_processes
    ctx.processes['API'] = api_processes
    ctx.force_serial = force_serial
    ctx.test_type = test_type
    ctx.results_path = results_path
    load_config(ctx)
    ctx.test_file = ConfigObj(os.path.join(ctx.home, TEST_FILE_PATH))
    ctx.tests = list(tests)

    # Create temporary folders to collect test results and other data
    if not os.path.exists(os.path.join(ctx.home, TMP_PATH)):
        os.mkdir(os.path.join(ctx.home, TMP_PATH))
    if not os.path.exists(os.path.join(ctx.home, TMP_RESULTS_PATH)):
        os.mkdir(os.path.join(ctx.home, TMP_RESULTS_PATH))

    # Create results queue and results queue flag
    ctx.manager = Manager()
    ctx.more_results = Value(c_bool, True)
    ctx.results_queue = ctx.manager.Queue()
    ctx.results_process = Process(target=results_processor,
                                  args=(ctx.results_queue, ctx.more_results,
                                        ctx.home, ctx.results_path))
    ctx.results_process.start()

    # Load the appropriate tests and output number of tests
    test_dict, num_tests = load_tests(ctx)
    click.secho("Collected {0} test(s)".format(num_tests), fg='white', bold=True)

    # Run tests and collect timing data
    start_time = time.time()
    run_tests(ctx, test_dict)
    end_time = time.time()

    # Kill the results queue by setting the flag to false
    with ctx.more_results.get_lock():
        ctx.results_queue.put("Testing is finished")
        ctx.more_results.value = False
    ctx.results_process.join()

    # Get the number of failed tests and output test results and timing data
    num_failed_tests = ctx.results_queue.get()
    if num_failed_tests == 0:
        click.secho("All tests passed!", fg='green', bold=True)
    elif num_failed_tests > 0:
        click.secho("Number of failed tests: {}".format(num_failed_tests),
                    fg='red', bold=True)
    else:
        click.secho("Results process failed.", fg='red', bold=True)
    click.secho("Tests took {0} seconds".format(str(end_time - start_time)),
                fg='white', bold=True)

    # Clean up temporary files
    tmp_file_paths = glob.glob(os.path.join(ctx.home, TMP_PATH, "*"))
    for file_path in tmp_file_paths:
        os.remove(file_path)

    sys.exit(0 if num_failed_tests == 0 else 1)
def reschedule(ctx):
    """Rearranges the Selenium tests in the config file so that they will be
    run in a more efficient way"""
    load_config(ctx)
    ctx.test_file = ConfigObj(os.path.join(ctx.home, TEST_FILE_PATH))
    sys.path.insert(1, os.path.join(ctx.home, 'src/test/python/utility'))
    new_parallel_test_order = OrderedDict()

    # Get timing data from sauce labs
    test_times_dict = get_test_times(10000)

    # Apply a weight to test times and sort by shortest test times
    test_weighted_avg_list = average_and_sort(test_times_dict)

    # Rewrite the parallel test section of the config file
    parallel_section = ctx.test_file['Parallel']['Parallel']
    parallel_tests = parallel_section.keys()
    new_parallel_order = [i[0] for i in test_weighted_avg_list
                          if i[0] in parallel_tests]
    for test in new_parallel_order:
        new_parallel_test_order[test] = parallel_section[test]
    if len(parallel_tests) > len(new_parallel_order):
        missing_tests = set(parallel_tests) - set(new_parallel_order)
        for test in missing_tests:
            new_parallel_test_order[test] = parallel_section[test]
    ctx.test_file['Parallel']['Parallel'] = new_parallel_test_order
    ctx.test_file.write()
    click.secho("Rewrote config file", fg='white')
def validate(config):
    """
    Check the pb_tool.cfg file for mandatory sections/files
    """
    cfg = get_config(config)
    mandatory = [('plugin', 'name'),
                 ('files', 'python_files'),
                 ('files', 'main_dialog'),
                 ('files', 'resource_files'),
                 ('files', 'extras'),
                 ('help', 'dir'),
                 ('help', 'target')]
    # list comprehension (not a generator) so every check runs and reports,
    # matching the original sequence of independent if-statements
    valid = all([check_cfg(cfg, section, key) for section, key in mandatory])

    if valid:
        click.secho(
            "Your {0} file is valid and contains all mandatory items".format(config),
            fg='green')
    else:
        click.secho("Your {0} file is invalid".format(config), fg='red')
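# For reference, a minimal pb_tool.cfg sketch covering exactly the mandatory
# items validate() checks above; every value is a placeholder.
SAMPLE_PB_TOOL_CFG = """
[plugin]
name: my_plugin

[files]
python_files: my_plugin.py
main_dialog: my_plugin_dialog.ui
resource_files: resources.qrc
extras: metadata.txt icon.png

[help]
dir: help
target: help/build
"""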
def get_test_times(num_tests):
    from sauceutils import SauceTools
    sauce = SauceTools("https://saucelabs.com", "polarqa",
                       "d609b648-22e3-44bb-a38e-c28931df837d")
    jobs = []
    last_time = int(time.time())
    test_times = defaultdict(list)
    bar_length = int(math.ceil(num_tests / 500))
    with click.progressbar(xrange(bar_length),
                           label="Downloading statistics from Sauce Labs",
                           fill_char=click.style('+', fg='green', bold=True),
                           empty_char=click.style('-', fg='red'),
                           width=40) as bar:
        for i in bar:
            jobs += sauce.get_jobs(num_jobs=500, full=True, end_date=last_time)
            last_time = int(jobs[-1]['start_time'])

    # Only add tests that have passed
    for job in jobs:
        if job['passed'] and job['end_time']:
            test_times[job['name'].lower()].append(
                [float(job['creation_time']),
                 float(job['end_time']) - float(job['start_time'])])
    click.secho("Sorted through statistics", fg='white')
    return test_times
def execute(func, *popenargs, **kwargs):
    if "env" not in kwargs:
        kwargs["env"] = get_subprocess_env()
    catch = kwargs.pop("catch", True)
    if kwargs.pop("silent", False):
        if "stdout" not in kwargs:
            kwargs["stdout"] = open(os.devnull, "w")
            if not is_windows():
                # close file descriptor devnull after exit
                # unfortunately, close_fds is not supported on Windows
                # platforms if you redirect stdin/stdout/stderr
                # => http://svn.python.org/projects/python/
                #    branches/py3k/Lib/subprocess.py
                kwargs["close_fds"] = True
        if "stderr" not in kwargs:
            kwargs["stderr"] = subprocess.STDOUT
    try:
        return func(*popenargs, **kwargs)
    except subprocess.CalledProcessError as exc:
        if not catch:
            raise
        output = (
            "There was an error trying to run a command. This is most likely",
            "not an issue with divio-cli, but the called program itself.",
            "Try checking the output of the command above.",
            "The command was:",
            "  {command}".format(command=" ".join(exc.cmd)),
        )
        hr(fg="red")
        click.secho(os.linesep.join(output), fg="red")
        sys.exit(1)
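# A hedged usage sketch for execute() above; subprocess.check_call is one
# callable it can wrap, and the command shown is invented.
import subprocess

# run quietly, but let a non-zero exit raise instead of calling sys.exit(1)
execute(subprocess.check_call, ['git', 'status'], silent=True, catch=False)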
def show_server_banner(env, debug, app_import_path, eager_loading):
    """Show extra startup messages the first time the server is run,
    ignoring the reloader.
    """
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        return

    if app_import_path is not None:
        message = ' * Serving Flask app "{0}"'.format(app_import_path)
        if not eager_loading:
            message += ' (lazy loading)'
        click.echo(message)

    click.echo(' * Environment: {0}'.format(env))
    if env == 'production':
        click.secho(
            '   WARNING: Do not use the development server in a production'
            ' environment.', fg='red')
        click.secho('   Use a production WSGI server instead.', dim=True)

    if debug is not None:
        click.echo(' * Debug mode: {0}'.format('on' if debug else 'off'))
def load_config_key():
    """Load API key from config file, write if needed"""
    global api_token
    try:
        api_token = os.environ['SOCCER_CLI_API_TOKEN']
    except KeyError:
        home = os.path.expanduser("~")
        config = os.path.join(home, ".soccer-cli.ini")
        if not os.path.exists(config):
            with open(config, "w") as cfile:
                key = get_input_key()
                cfile.write(key)
        else:
            with open(config, "r") as cfile:
                key = cfile.read()
        if key:
            api_token = key
        else:
            os.remove(config)  # remove 0-byte file
            click.secho('No API Token detected. '
                        'Please visit {0} and get an API Token, '
                        'which will be used by Soccer CLI '
                        'to get access to the data.'.format(BASE_URL),
                        fg="red", bold=True)
            sys.exit(1)
    return api_token
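# Hedged usage note for load_config_key(): setting the environment variable
# (name taken from the function above) bypasses the ~/.soccer-cli.ini lookup
# entirely. The token value is a placeholder.
import os
os.environ['SOCCER_CLI_API_TOKEN'] = '<your-token>'
token = load_config_key()  # returns the env value without touching the config file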
def check_platformio_upgrade():
    last_check = app.get_state_item("last_check", {})
    interval = int(app.get_setting("check_platformio_interval")) * 3600 * 24
    if (time() - interval) < last_check.get("platformio_upgrade", 0):
        return

    last_check['platformio_upgrade'] = int(time())
    app.set_state_item("last_check", last_check)

    try:
        latest_version = get_latest_version()
    except GetLatestVersionError:
        click.secho("Failed to check for PlatformIO upgrades", fg="red")
        return

    if (latest_version == __version__ or
            Upgrader.version_to_int(latest_version) <
            Upgrader.version_to_int(__version__)):
        return

    click.secho("There is a new version %s of PlatformIO available.\n"
                "Please upgrade it via " % latest_version,
                fg="yellow", nl=False)
    click.secho("platformio upgrade", fg="cyan", nl=False)
    click.secho(" command.\nChanges: ", fg="yellow", nl=False)
    click.secho("http://docs.platformio.org/en/latest/history.html\n",
                fg="cyan")
def print_items(self, message, item_ids):
    """Print the items.

    :type message: str
    :param message: A message to print out to the user before outputting
        the results.

    :type item_ids: iterable
    :param item_ids: A collection of items to print.
        Can be a list or dictionary.
    """
    self.config.item_ids = []
    index = 1
    for item_id in item_ids:
        try:
            item = self.hacker_news_api.get_item(item_id)
            if item.title:
                formatted_item = self.format_item(item, index)
                self.config.item_ids.append(item.item_id)
                click.echo(formatted_item)
                index += 1
        except InvalidItemID:
            self.print_item_not_found(item_id)
    self.config.save_cache()
    if self.config.show_tip:
        click.secho(self.tip_view(str(index - 1)))
def cli(env, **args):
    """Order/create virtual servers."""
    vsi = SoftLayer.VSManager(env.client)
    _validate_args(env, args)

    create_args = _parse_create_args(env.client, args)

    test = args.get('test', False)
    do_create = not (args.get('export') or test)

    if do_create:
        if not (env.skip_confirmations or formatting.confirm(
                "This action will incur charges on your account. Continue?")):
            raise exceptions.CLIAbort('Aborting virtual server order.')

    if args.get('export'):
        export_file = args.pop('export')
        template.export_to_template(export_file, args, exclude=['wait', 'test'])
        env.fout('Successfully exported options to a template file.')
    else:
        result = vsi.order_guest(create_args, test)
        output = _build_receipt_table(result, args.get('billing'), test)

        if do_create:
            env.fout(_build_guest_table(result))
        env.fout(output)

        if args.get('wait'):
            virtual_guests = utils.lookup(result, 'orderDetails', 'virtualGuests')
            guest_id = virtual_guests[0]['id']
            click.secho("Waiting for %s to finish provisioning..." % guest_id,
                        fg='green')
            ready = vsi.wait_for_ready(guest_id, args.get('wait') or 1)
            if ready is False:
                env.out(env.fmt(output))
                raise exceptions.CLIHalt(code=1)
def difflocal(backup_context):
    """Diff Dropbox and working dir contents.
    """
    fmt = "{:<40}{:<20}"
    print(fmt.format("File name", "Status"))
    query = "SELECT * FROM files ORDER BY Filename"
    for row in backup_context.dbcursor.execute(query):
        filename = row["Filename"]
        in_dropbox = row["InDropbox"]
        in_workdir = row["InWorkingDir"]
        # NOTE: assumed column name; the original referenced in_s3 in the
        # last branch without ever assigning it, which raised a NameError.
        in_s3 = row["InS3"]
        dropbox_file_abspath = backup_context.dropbox_camera_uploads_dir / filename
        file_ext = os.path.splitext(filename)[1]
        if file_ext in (backup_context.video_file_extensions):
            workdir_file_abspath = backup_context.local_working_dir / "video" / filename
        else:
            workdir_file_abspath = backup_context.local_working_dir / filename
        if in_dropbox == 1 and in_workdir == 1:
            if filecmp.cmp(dropbox_file_abspath, workdir_file_abspath,
                           shallow=False):
                print(fmt.format(filename, "👍 diff OK"))
            else:
                print(fmt.format(filename, "❌"))
        elif in_dropbox == 1 and in_workdir == 0:
            click.secho(fmt.format(filename, "dropbox only"), bg="red", fg="white")
        # Silencing since this is a little verbose:
        #elif in_dropbox == 0 and in_workdir == 1:
        #    click.secho(fmt.format(filename, "workdir only"))
        elif in_dropbox == 0 and in_workdir == 0 and in_s3 == 1:
            click.secho(fmt.format(filename, "s3 only"), bg="blue", fg="white")
def print_item_not_found(self, item_id):
    """Print a message the given item id was not found.

    :type item_id: int
    :param item_id: The item's id.
    """
    click.secho('Item with id {0} not found.'.format(item_id), fg='red')
def cli(platforms, with_package, without_package, skip_default_package):
    for platform in platforms:
        p = PlatformFactory.newPlatform(platform)
        if p.install(with_package, without_package, skip_default_package):
            click.secho("The platform '%s' has been successfully installed!"
                        % platform, fg="green")
def run_test(test):
    click.secho(test.__name__, bold=True)
    return_code = test()
    return return_code
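# A hedged usage sketch for run_test() above; the test function is invented
# and assumes the convention implied by the code (numeric return code,
# 0 = success).
def test_smoke():
    return 0  # hypothetical test body

rc = run_test(test_smoke)  # prints 'test_smoke' in bold, then runs it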
def remove_app(app_name, dry_run=False, yes=False, no_backup=False, force=False):
    """Remove app and all linked to the app's module with the app from a site."""
    # don't allow uninstalling an app that is not installed, unless forced
    if not force:
        if app_name not in frappe.get_installed_apps():
            click.secho("App {0} not installed on Site {1}".format(
                app_name, frappe.local.site), fg="yellow")
            return

    print("Uninstalling App {0} from Site {1}...".format(app_name, frappe.local.site))

    if not dry_run and not yes:
        confirm = click.confirm(
            "All doctypes (including custom), modules related to this app "
            "will be deleted. Are you sure you want to continue?")
        if not confirm:
            return

    if not no_backup:
        from frappe.utils.backups import scheduled_backup
        print("Backing up...")
        scheduled_backup(ignore_files=True)

    frappe.flags.in_uninstall = True
    drop_doctypes = []

    # remove modules, doctypes, roles
    for module_name in frappe.get_module_list(app_name):
        for doctype in frappe.get_list("DocType",
                                       filters={"module": module_name},
                                       fields=["name", "issingle"]):
            print("removing DocType {0}...".format(doctype.name))
            if not dry_run:
                frappe.delete_doc("DocType", doctype.name)

                if not doctype.issingle:
                    drop_doctypes.append(doctype.name)

        linked_doctypes = frappe.get_all("DocField",
                                         filters={"fieldtype": "Link",
                                                  "options": "Module Def"},
                                         fields=['parent'])
        ordered_doctypes = ["Report", "Page", "Web Form"]
        doctypes_with_linked_modules = ordered_doctypes + [
            doctype.parent for doctype in linked_doctypes
            if doctype.parent not in ordered_doctypes]
        for doctype in doctypes_with_linked_modules:
            for record in frappe.get_list(doctype, filters={"module": module_name}):
                print("removing {0} {1}...".format(doctype, record.name))
                if not dry_run:
                    frappe.delete_doc(doctype, record.name)

        print("removing Module {0}...".format(module_name))
        if not dry_run:
            frappe.delete_doc("Module Def", module_name)

    remove_from_installed_apps(app_name)

    if not dry_run:
        # drop tables after a commit
        frappe.db.commit()

        for doctype in set(drop_doctypes):
            frappe.db.sql("drop table `tab{0}`".format(doctype))

    click.secho("Uninstalled App {0} from Site {1}".format(
        app_name, frappe.local.site), fg="green")

    frappe.flags.in_uninstall = False
def alice(click_config,
          action,

          # Mode
          dev, force, dry_run,

          # Network
          teacher_uri, min_stake, federated_only, network,
          discovery_port, controller_port,

          # Filesystem
          config_root, config_file,

          # Blockchain
          pay_with, provider_uri, geth, sync, poa, no_registry,
          registry_filepath, device,

          # Alice
          bob_encrypting_key, bob_verifying_key, label, m, n,
          value, rate, duration, expiration, message_kit,
          ):
    #
    # Validate
    #
    if federated_only and geth:
        raise click.BadOptionUsage(
            option_name="--geth",
            message="Federated only cannot be used with the --geth flag")

    # Banner
    click.clear()
    if not click_config.json_ipc and not click_config.quiet:
        click.secho(ALICE_BANNER)

    #
    # Managed Ethereum Client
    #
    ETH_NODE = NO_BLOCKCHAIN_CONNECTION
    if geth:
        ETH_NODE = actions.get_provider_process()
        provider_uri = ETH_NODE.provider_uri(scheme='file')

    #
    # Eager Actions (No Authentication Required)
    #
    if action == 'init':
        """Create a brand-new persistent Alice"""
        if dev:
            raise click.BadArgumentUsage(
                "Cannot create a persistent development character")
        if not config_root:                          # Flag
            config_root = click_config.config_file   # Envvar
        new_alice_config = AliceConfiguration.generate(
            password=get_password(confirm=True),
            config_root=config_root,
            checksum_address=pay_with,
            domains={network} if network else None,
            federated_only=federated_only,
            download_registry=no_registry,
            registry_filepath=registry_filepath,
            provider_process=ETH_NODE,
            poa=poa,
            provider_uri=provider_uri,
            m=m,
            n=n,
            duration=duration,
            rate=rate)
        painting.paint_new_installation_help(new_configuration=new_alice_config)
        return  # Exit

    elif action == "view":
        """Paint an existing configuration to the console"""
        configuration_file_location = config_file or AliceConfiguration.default_filepath()
        response = AliceConfiguration._read_configuration_file(
            filepath=configuration_file_location)
        click_config.emit(response)
        return  # Exit

    #
    # Make Alice
    #
    if dev:
        alice_config = AliceConfiguration(
            dev_mode=True,
            network_middleware=click_config.middleware,
            domains={network},
            provider_process=ETH_NODE,
            provider_uri=provider_uri,
            federated_only=True)
    else:
        try:
            alice_config = AliceConfiguration.from_configuration_file(
                dev_mode=False,
                filepath=config_file,
                domains={network} if network else None,
                network_middleware=click_config.middleware,
                rest_port=discovery_port,
                checksum_address=pay_with,
                provider_process=ETH_NODE,
                provider_uri=provider_uri)
        except FileNotFoundError:
            return actions.handle_missing_configuration_file(
                character_config_class=AliceConfiguration,
                config_file=config_file)

    ALICE = actions.make_cli_character(character_config=alice_config,
                                       click_config=click_config,
                                       dev=dev,
                                       teacher_uri=teacher_uri,
                                       min_stake=min_stake)

    #
    # Admin Actions
    #
    if action == "run":
        """Start Alice Controller"""

        # RPC
        if click_config.json_ipc:
            rpc_controller = ALICE.make_rpc_controller()
            _transport = rpc_controller.make_control_transport()
            rpc_controller.start()
            return

        # HTTP
        else:
            ALICE.controller.emitter(
                message=f"Alice Verifying Key {bytes(ALICE.stamp).hex()}",
                color="green",
                bold=True)
            controller = ALICE.make_web_controller(crash_on_error=click_config.debug)
            ALICE.log.info('Starting HTTP Character Web Controller')
            return controller.start(http_port=controller_port, dry_run=dry_run)

    elif action == "destroy":
        """Delete all configuration files from the disk"""
        if dev:
            message = "'nucypher alice destroy' cannot be used in --dev mode"
            raise click.BadOptionUsage(option_name='--dev', message=message)
        return actions.destroy_configuration(character_config=alice_config,
                                             force=force)

    #
    # Alice API
    #
    elif action == "public-keys":
        response = ALICE.controller.public_keys()
        return response

    elif action == "derive-policy-pubkey":

        # Validate
        if not label:
            raise click.BadOptionUsage(
                option_name='label',
                message="--label is required for deriving a policy encrypting key.")

        # Request
        return ALICE.controller.derive_policy_encrypting_key(label=label)

    elif action == "grant":

        # Validate
        if not all((bob_verifying_key, bob_encrypting_key, label)):
            raise click.BadArgumentUsage(
                message="--bob-verifying-key, --bob-encrypting-key, and --label are "
                        "required options to grant (optionally --m, --n, and --expiration).")

        # Request
        grant_request = {
            'bob_encrypting_key': bob_encrypting_key,
            'bob_verifying_key': bob_verifying_key,
            'label': label,
            'm': m,
            'n': n,
            'expiration': expiration,
        }
        if not ALICE.federated_only:
            grant_request.update({'value': value})
        return ALICE.controller.grant(request=grant_request)

    elif action == "revoke":

        # Validate (original checked `not label and bob_verifying_key`, which
        # only fired when label was missing but the key was present; the error
        # message says both are required)
        if not all((label, bob_verifying_key)):
            raise click.BadArgumentUsage(
                message="--label and --bob-verifying-key are required options for revoke.")

        # Request
        revoke_request = {'label': label, 'bob_verifying_key': bob_verifying_key}
        return ALICE.controller.revoke(request=revoke_request)

    elif action == "decrypt":

        # Validate
        if not all((label, message_kit)):
            input_specification, output_specification = ALICE.controller.get_specifications(
                interface_name=action)
            required_fields = ', '.join(input_specification)
            raise click.BadArgumentUsage(f'{required_fields} are required flags to decrypt')

        # Request
        request_data = {'label': label, 'message_kit': message_kit}
        response = ALICE.controller.decrypt(request=request_data)
        return response

    else:
        raise click.BadArgumentUsage(f"No such argument {action}")
def cmd_remove_account(config, id):
    acc = remove_account(config, id)
    click.secho(
        f'Account removed: {acc.id} - {acc.name}',
        fg='green',
    )
def cmd_add_account(config, api_key, endpoint):
    account_id, name = add_account(config, api_key, endpoint)
    if not config.silent:
        click.secho(f'New account added: {account_id} - {name}', fg='green')
def cli(obj, ids, query, filters, display, from_date=None):
    """Query for alerts based on search filter criteria."""
    client = obj['client']
    timezone = obj['timezone']

    if ids:
        query = [('id', x) for x in ids]
    elif query:
        query = [('q', query)]
    else:
        query = build_query(filters)
    if from_date:
        query.append(('from-date', from_date))

    r = client.http.get('/alerts', query, page=1, page_size=1000)

    if obj['output'] == 'json':
        click.echo(json.dumps(r['alerts'], sort_keys=True, indent=4,
                              ensure_ascii=False))
    elif obj['output'] in ['json_lines', 'jsonl', 'ndjson']:
        for alert in r['alerts']:
            click.echo(json.dumps(alert, ensure_ascii=False))
    else:
        alerts = [Alert.parse(a) for a in r['alerts']]
        last_time = r['lastTime']
        auto_refresh = r['autoRefresh']

        if display == 'tabular':
            headers = {
                'id': 'ID',
                'lastReceiveTime': 'LAST RECEIVED',
                'severity': 'SEVERITY',
                'status': 'STATUS',
                'duplicateCount': 'DUPL',
                'customer': 'CUSTOMER',
                'environment': 'ENVIRONMENT',
                'service': 'SERVICE',
                'resource': 'RESOURCE',
                'group': 'GROUP',
                'event': 'EVENT',
                'value': 'VALUE',
                'text': 'TEXT'
            }
            click.echo(tabulate([a.tabular('summary', timezone) for a in alerts],
                                headers=headers, tablefmt=obj['output']))
        elif display in ['compact', 'details']:
            for alert in reversed(alerts):
                color = COLOR_MAP.get(alert.severity, {'fg': 'white'})
                click.secho(
                    '{}|{}|{}|{:5d}|{}|{:<5s}|{:<10s}|{:<18s}|{:12s}|{:16s}|{:12s}'.format(
                        alert.id[0:8],
                        DateTime.localtime(alert.last_receive_time, timezone),
                        alert.severity,
                        alert.duplicate_count,
                        alert.customer or '-',
                        alert.environment,
                        ','.join(alert.service),
                        alert.resource,
                        alert.group,
                        alert.event,
                        alert.value or 'n/a'),
                    fg=color['fg'])
                click.secho('   |{}'.format(alert.text), fg=color['fg'])

                if display == 'details':
                    click.secho('    severity   | {} -> {}'.format(
                        alert.previous_severity, alert.severity), fg=color['fg'])
                    click.secho('    trend      | {}'.format(alert.trend_indication), fg=color['fg'])
                    click.secho('    status     | {}'.format(alert.status), fg=color['fg'])
                    click.secho('    resource   | {}'.format(alert.resource), fg=color['fg'])
                    click.secho('    group      | {}'.format(alert.group), fg=color['fg'])
                    click.secho('    event      | {}'.format(alert.event), fg=color['fg'])
                    click.secho('    value      | {}'.format(alert.value), fg=color['fg'])
                    click.secho('    tags       | {}'.format(' '.join(alert.tags)), fg=color['fg'])

                    for key, value in alert.attributes.items():
                        click.secho('    {} | {}'.format(key.ljust(10), value), fg=color['fg'])

                    latency = alert.receive_time - alert.create_time

                    click.secho('      time created  | {}'.format(
                        DateTime.localtime(alert.create_time, timezone)), fg=color['fg'])
                    click.secho('      time received | {}'.format(
                        DateTime.localtime(alert.receive_time, timezone)), fg=color['fg'])
                    click.secho('      last received | {}'.format(
                        DateTime.localtime(alert.last_receive_time, timezone)), fg=color['fg'])
                    click.secho('      latency       | {}ms'.format(
                        latency.microseconds / 1000), fg=color['fg'])
                    click.secho('      timeout       | {}s'.format(alert.timeout), fg=color['fg'])
                    click.secho('      alert id      | {}'.format(alert.id), fg=color['fg'])
                    click.secho('      last recv id  | {}'.format(alert.last_receive_id), fg=color['fg'])
                    click.secho('      customer      | {}'.format(alert.customer), fg=color['fg'])
                    click.secho('      environment   | {}'.format(alert.environment), fg=color['fg'])
                    click.secho('      service       | {}'.format(','.join(alert.service)), fg=color['fg'])
                    click.secho('      resource      | {}'.format(alert.resource), fg=color['fg'])
                    click.secho('      type          | {}'.format(alert.event_type), fg=color['fg'])
                    click.secho('      repeat        | {}'.format(alert.repeat), fg=color['fg'])
                    click.secho('      origin        | {}'.format(alert.origin), fg=color['fg'])
                    click.secho('      correlate     | {}'.format(','.join(alert.correlate)), fg=color['fg'])

        return auto_refresh, last_time
def do_cli(  # pylint: disable=too-many-locals, too-many-statements
    function_identifier: Optional[str],
    template: str,
    base_dir: Optional[str],
    build_dir: str,
    cache_dir: str,
    clean: bool,
    use_container: bool,
    cached: bool,
    parallel: bool,
    manifest_path: Optional[str],
    docker_network: Optional[str],
    skip_pull_image: bool,
    parameter_overrides: Dict,
    mode: Optional[str],
) -> None:
    """
    Implementation of the ``cli`` method
    """
    from samcli.commands.exceptions import UserException
    from samcli.commands.build.build_context import BuildContext
    from samcli.lib.build.app_builder import (
        ApplicationBuilder,
        BuildError,
        UnsupportedBuilderLibraryVersionError,
        ContainerBuildNotSupported,
    )
    from samcli.lib.build.workflow_config import UnsupportedRuntimeException
    from samcli.local.lambdafn.exceptions import FunctionNotFound
    from samcli.commands._utils.template import move_template
    from samcli.lib.build.build_graph import InvalidBuildGraphException

    LOG.debug("'build' command is called")
    if cached:
        LOG.info("Starting Build use cache")
    if use_container:
        LOG.info("Starting Build inside a container")

    with BuildContext(
        function_identifier,
        template,
        base_dir,
        build_dir,
        cache_dir,
        cached,
        clean=clean,
        manifest_path=manifest_path,
        use_container=use_container,
        parameter_overrides=parameter_overrides,
        docker_network=docker_network,
        skip_pull_image=skip_pull_image,
        mode=mode,
    ) as ctx:
        try:
            builder = ApplicationBuilder(
                ctx.resources_to_build,
                ctx.build_dir,
                ctx.base_dir,
                ctx.cache_dir,
                ctx.cached,
                ctx.is_building_specific_resource,
                manifest_path_override=ctx.manifest_path_override,
                container_manager=ctx.container_manager,
                mode=ctx.mode,
                parallel=parallel,
            )
        except FunctionNotFound as ex:
            raise UserException(str(ex), wrapped_from=ex.__class__.__name__) from ex

        try:
            artifacts = builder.build()

            stack_output_template_path_by_stack_path = {
                stack.stack_path: stack.get_output_template_path(ctx.build_dir)
                for stack in ctx.stacks
            }
            for stack in ctx.stacks:
                modified_template = builder.update_template(
                    stack,
                    artifacts,
                    stack_output_template_path_by_stack_path,
                )
                move_template(stack.location,
                              stack.get_output_template_path(ctx.build_dir),
                              modified_template)

            click.secho("\nBuild Succeeded", fg="green")

            # try to use relpath so the command is easier to understand, however,
            # under Windows, when SAM and (build_dir or output_template_path) are
            # on different drive, relpath() fails.
            root_stack = SamLocalStackProvider.find_root_stack(ctx.stacks)
            out_template_path = root_stack.get_output_template_path(ctx.build_dir)
            try:
                build_dir_in_success_message = os.path.relpath(ctx.build_dir)
                output_template_path_in_success_message = os.path.relpath(out_template_path)
            except ValueError:
                LOG.debug("Failed to retrieve relpath - using the specified path as-is instead")
                build_dir_in_success_message = ctx.build_dir
                output_template_path_in_success_message = out_template_path

            msg = gen_success_msg(
                build_dir_in_success_message,
                output_template_path_in_success_message,
                os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR),
            )

            click.secho(msg, fg="yellow")

        except (
            UnsupportedRuntimeException,
            BuildError,
            BuildInsideContainerError,
            UnsupportedBuilderLibraryVersionError,
            ContainerBuildNotSupported,
            InvalidBuildGraphException,
        ) as ex:
            click.secho("\nBuild Failed", fg="red")

            # Some Exceptions have a deeper wrapped exception that needs to be
            # surfaced from deeper than just one level down.
            deep_wrap = getattr(ex, "wrapped_from", None)
            wrapped_from = deep_wrap if deep_wrap else ex.__class__.__name__
            raise UserException(str(ex), wrapped_from=wrapped_from) from ex
def print_no_file_found():
    c.secho('\nNo file found in data dir!', fg='red', bold=True)
    c.secho('For processing local files put state/trace files into:')
    c.secho('└──' + data.LOCAL_LOG_DATA_PATH)
def print_processing_info(index, file, all_files):
    c.secho('◼', fg='blue')
    c.secho('├── ', nl=False)
    c.secho('[{} of {}]'.format(index, len(all_files)), fg='green')
    c.secho('├── ', nl=False)
    c.secho('file: ', fg='green', nl=False)
    c.secho('{}'.format(file))
def print_download_info(index, url, size, local=True):
    c.secho('◼', fg='blue')
    c.secho('├── ', nl=False)
    c.secho('[{} of {}]'.format(index, len(b.GAME_NUMBERS)), fg='green')
    c.secho('├── ', nl=False)
    c.secho('url: ', fg='green', nl=False)
    c.secho('{}'.format(url), nl=False)
    c.secho(' size: ', fg='green', nl=False)
    c.secho('{} mb'.format(size))
    c.secho('└── ', nl=False)
    c.secho('download and extract...', fg='white', nl=False)
def _cmd(self, name, args=None, options=None, env=None, **kwargs):
    exe = which(name, self.paths)
    option_args = ["{0}={1}".format(key, value)
                   for key, value in six.iteritems(options or {})]
    c = [exe] + option_args + as_list(args or [])
    if self.verbose:
        click.secho(' '.join(c), bold=True)
    return cmd(c,
               env=as_dict_str(merge(self.env, self._get_paths_env(), env)),
               **kwargs)
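# A hedged usage sketch for _cmd() above; the tool name, arguments and option
# are invented, and the owning class is assumed to provide self.paths,
# self.env and self.verbose.
self._cmd('mytool', args=['build'], options={'--config': 'dev.cfg'})
# -> runs: mytool --config=dev.cfg build (command echoed in bold when verbose)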
def print_local_intro():
    c.secho('\nstart processing local log-files', fg='green')
    c.secho('└── executed: {}'.format(datetime.datetime.now()))
def print_url(title, url):
    """Pretty-print a URL on the terminal."""
    click.secho(" %s: " % title, nl=False, fg='blue')
    click.secho(url, bold=True)
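# A minimal usage sketch for print_url(); the title and URL are illustrative.
print_url('Docs', 'https://example.com/docs')
# -> " Docs: " in blue (no newline), then the URL in bold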
def cloud(name, project, version, watch, logs):
    """
    Run a deployed flow in Prefect Cloud.

    \b
    Options:
        --name, -n      TEXT        The name of a flow to run                      [required]
        --project, -p   TEXT        The name of a project that contains the flow   [required]
        --version, -v   INTEGER     A flow version to run
        --watch, -w                 Watch current state of the flow run,
                                    stream output to stdout
        --logs, -l                  Get logs of the flow run,
                                    stream output to stdout
    """
    if watch and logs:
        click.secho("Streaming state and logs not currently supported together.",
                    fg="red")
        return

    query = {
        "query": {
            with_args(
                "flow",
                {
                    "where": {
                        "_and": {
                            "name": {"_eq": name},
                            "version": {"_eq": version},
                            "project": {"name": {"_eq": project}},
                        }
                    },
                    "order_by": {
                        "name": EnumValue("asc"),
                        "version": EnumValue("desc"),
                    },
                    "distinct_on": EnumValue("name"),
                },
            ): {"id": True}
        }
    }

    client = Client()
    result = client.graphql(query)

    flow_data = result.data.flow

    if flow_data:
        flow_id = flow_data[0].id
    else:
        click.secho("{} not found".format(name), fg="red")
        return

    flow_run_id = client.create_flow_run(flow_id=flow_id)
    click.echo("Flow Run ID: {}".format(flow_run_id))

    # reused below to poll the run's state; kept in its own variable so it
    # does not clobber the logs query inside the loop (the original reassigned
    # `query`, which broke the second and later log fetches)
    state_query = {
        "query": {
            with_args("flow_run_by_pk", {"id": flow_run_id}): {"state": True}
        }
    }

    if watch:
        current_state = ""
        while True:
            result = client.graphql(state_query)
            if result.data.flow_run_by_pk.state != current_state:
                current_state = result.data.flow_run_by_pk.state
                if current_state != "Success" and current_state != "Failed":
                    click.echo("{} -> ".format(current_state), nl=False)
                else:
                    click.echo(current_state)
                    break
            time.sleep(3)

    if logs:
        all_logs = []

        log_query = {
            with_args("logs", {"order_by": {EnumValue("timestamp"): EnumValue("asc")}}): {
                "timestamp": True,
                "message": True,
                "level": True,
            },
            "start_time": True,
        }

        query = {
            "query": {
                with_args(
                    "flow_run",
                    {
                        "where": {"id": {"_eq": flow_run_id}},
                        "order_by": {EnumValue("start_time"): EnumValue("desc")},
                    },
                ): log_query
            }
        }

        while True:
            result = Client().graphql(query)

            flow_run = result.data.flow_run
            if not flow_run:
                click.secho("{} not found".format(flow_run_id), fg="red")
                return

            new_run = flow_run[0]
            logs = new_run.logs
            output = []

            for i in logs:
                if [i.timestamp, i.level, i.message] not in all_logs:
                    if not len(all_logs):
                        click.echo(
                            tabulate(
                                [[i.timestamp, i.level, i.message]],
                                headers=["TIMESTAMP", "LEVEL", "MESSAGE"],
                                tablefmt="plain",
                                numalign="left",
                                stralign="left",
                            ))
                        all_logs.append([i.timestamp, i.level, i.message])
                        continue
                    output.append([i.timestamp, i.level, i.message])
                    all_logs.append([i.timestamp, i.level, i.message])

            if output:
                click.echo(tabulate(output, tablefmt="plain",
                                    numalign="left", stralign="left"))

            # Check if state is either Success or Failed, exit if it is
            result = client.graphql(state_query)
            if (result.data.flow_run_by_pk.state == "Success"
                    or result.data.flow_run_by_pk.state == "Failed"):
                return

            time.sleep(3)
def _test_case(config: Config, codefile: CodeFile, case_name: str,
               infile: Path, expfile: Path) -> RunResult:  # {{{
    # run program
    click.secho('-' * 10 + case_name + '-' * 10, fg='blue')
    run_result = codefile.run(config, infile)

    def smart_print(strs, func=print,
                    limit_of_lines=config.pref['test']['limit_height_max_output'],
                    limit_of_width=config.pref['test']['limit_width_max_output']):
        n = len(strs)
        x = limit_of_lines
        y = limit_of_width

        def print_line(line):
            if len(line) <= 2 * y:
                func(line)
            else:
                func(line[:y] + ' ~~~ ' + line[len(line) - y:len(line)])

        # note: the original iterated the caller's `lines` variable here;
        # using the `strs` argument keeps the helper self-contained
        if n <= 2 * x:
            for line in strs:
                print_line(line)
        else:
            for i in range(0, x):
                print_line(strs[i])
            print_line("~~~")
            print_line("~~~")
            for i in range(n - x, n):
                print_line(strs[i])

    # print input
    with open(infile, 'r') as f:
        print('*' * 7 + ' input ' + '*' * 7)
        lines = f.read().split('\n')
        smart_print(lines, limit_of_lines=30)

    # print expected
    expfile_exist = True
    try:
        with open(expfile, 'r') as f:
            print('*' * 7 + ' expected ' + '*' * 7)
            exp_str = f.read()
            lines = exp_str.split('\n')
            smart_print(lines)
            exp = exp_str.split('\n')
    except FileNotFoundError:
        print('*' * 7 + ' expected ' + '*' * 7)
        click.secho(f"expected file:[{expfile.name}] not found\n", fg='yellow')
        exp = ['']
        expfile_exist = False

    # print result
    print('*' * 7 + ' stdout ' + '*' * 7)
    lines = run_result.stdout.split('\n')
    smart_print(lines)
    stdout = run_result.stdout.split('\n')

    # print stderr message
    print('*' * 7 + ' stderr ' + '*' * 7)
    lines = run_result.stderr.split('\n')

    def print_stderr(line):
        line = line.replace(str(codefile.code_dir), "")
        click.secho(line, fg='yellow')

    smart_print(lines, func=print_stderr)

    for line in run_result.stderr.split('\n'):
        if re.search('runtime error', line):
            click.secho('--RE--\n', fg='red')
            run_result.judge = JudgeResult.RE
            return run_result

    # compare result and expected
    if run_result.TLE_flag:
        click.secho('--TLE--\n', fg='red')
        run_result.judge = JudgeResult.TLE
        return run_result
    if run_result.returncode != 0:
        SIGMAP = dict((int(k), v)
                      for v, k in reversed(sorted(signal.__dict__.items()))
                      if v.startswith('SIG') and not v.startswith('SIG_'))
        click.secho('--RE--', fg='red')
        click.secho(f':{SIGMAP[abs(run_result.returncode)]}'
                    if abs(run_result.returncode) in SIGMAP.keys()
                    else str(abs(run_result.returncode)), fg='red')
        print('\n')
        run_result.judge = JudgeResult.RE
        return run_result
    if run_result.used_memory > config.pref['test']['max_memory']:
        click.secho('--MLE--\n', fg='red')
        run_result.judge = JudgeResult.MLE
        return run_result

    # ignore trailing blank lines
    while stdout and stdout[-1] == '':
        stdout.pop()
    while exp and exp[-1] == '':
        exp.pop()

    if not expfile_exist:
        click.secho('--NOEXP--\n', fg='yellow')
        run_result.judge = JudgeResult.NOEXP
    elif len(exp) == 0:
        click.secho('--WA--\n', fg='red')
        run_result.judge = JudgeResult.WA
    elif re.search('TLE.*naive.*', exp[0]):
        click.secho('TLENAIVE\n', fg='yellow')
        run_result.judge = JudgeResult.TLENAIVE
    elif len(stdout) != len(exp):
        click.secho('--WA--\n', fg='red')
        run_result.judge = JudgeResult.WA
    else:
        for i in range(len(stdout)):
            if stdout[i].replace('\r', '').rstrip() != exp[i].rstrip():
                click.secho('--WA--\n\n', fg='red')
                run_result.judge = JudgeResult.WA
                break
        else:
            click.secho('--AC--\n', fg='green')
            run_result.judge = JudgeResult.AC
    return run_result
def find_appointment(
    user,
    password,
    region,
    bookingtype,
    specialization,
    clinic,
    doctor,
    start_date,
    end_date,
    start_time,
    end_time,
    service,
    interval,
    days_ahead,
    enable_notifier,
    notification_title,
):
    if end_date:
        start_date_dt = datetime.strptime(start_date, "%Y-%m-%d")
        end_date_dt = datetime.strptime(end_date, "%Y-%m-%d")
        diff = end_date_dt - start_date_dt
        days_ahead = diff.days

    valid = validate_arguments(bookingtype=bookingtype,
                               specialization=specialization,
                               service=service)
    if not valid:
        return

    iteration_counter = 1
    med_session = MedicoverSession(username=user, password=password)

    try:
        med_session.log_in()
    except Exception:
        click.secho("Unsuccessful login", fg="red")
        return

    click.echo("Logged in")

    med_session.load_search_form()

    while interval > 0 or iteration_counter < 2:
        appointments = []
        start_date_param = start_date
        for _ in range(days_ahead):
            found_appointments = med_session.search_appointments(
                region=region,
                bookingtype=bookingtype,
                specialization=specialization,
                clinic=clinic,
                doctor=doctor,
                start_date=start_date_param,
                end_date=end_date,
                start_time=start_time,
                end_time=end_time,
                service=service,
            )

            if not found_appointments:
                break

            appointment_datetime = found_appointments[-1].appointment_datetime
            appointment_datetime = datetime.strptime(appointment_datetime,
                                                     "%Y-%m-%dT%H:%M:%S")
            appointment_datetime = appointment_datetime + timedelta(days=1)
            start_date_param = appointment_datetime.date().isoformat()

            appointments.extend(found_appointments)

        if not appointments:
            click.echo(click.style(
                f"(iteration: {iteration_counter}) No results found",
                fg="yellow"))
        else:
            process_appointments(
                appointments,
                iteration_counter,
                notifier=enable_notifier,
                notification_title=notification_title,
            )

        iteration_counter += 1
        time.sleep(interval * 60)
def explore(startup_command: str, startup_script: str, hook_debug: bool,
            quiet: bool, file_commands) -> None:
    """
        Start the objection exploration REPL.
    """
    # specify if hooks should be debugged
    app_state.debug_hooks = hook_debug

    # start the main REPL
    r = Repl()

    # if we have a command to run, do that first before
    # the call to get_device_info().
    if startup_command:
        for command in startup_command:
            click.secho('Running a startup command... {0}'.format(command), dim=True)
            r.run_command(command)

    # if we have a startup script to run, use the 'import' command
    # and give it the user's path.
    if startup_script:
        click.secho('Importing and running a startup script...', dim=True)
        r.run_command('import {0}'.format(startup_script))

    try:
        # poll the device for information. this method also sets
        # the device type internally in state.device
        device_info = get_device_info()
    except (frida.TimedOutError, frida.ServerNotRunningError,
            frida.ProcessNotFoundError, frida.NotSupportedError) as e:
        click.secho('Could not connect with error: {0}'.format(str(e)), fg='red')
        print_frida_connection_help()
        return

    # process commands from a resource file
    if file_commands:
        click.secho('Running commands from file...', bold=True)
        for command in file_commands.readlines():
            # clean up newlines
            command = command.strip()

            # do nothing for empty lines
            if command == '':
                continue

            # run the command using the instantiated repl
            click.secho('Running: \'{0}\':\n'.format(command), dim=True)
            r.run_command(command)

    warn_about_older_operating_systems()

    # run the REPL and wait for more commands
    r.set_prompt_tokens(device_info)
    r.start_repl(quiet=quiet)
def print_stderr(line):
    line = line.replace(str(codefile.code_dir), "")
    click.secho(line, fg='yellow')
def version() -> None:
    """
        Prints the current version and exits.
    """
    click.secho('objection: {0}'.format(__version__))
            infile_contents = []
            while True:
                line = input()
                if line:
                    infile_contents.append(line + '\n')
                else:
                    break
            with open(infile, mode='w') as f:
                f.writelines(infile_contents)
            expfile = Path(tempfile.NamedTemporaryFile().name)
        elif case in set(map(str, range(1, 101))):
            case = f'sample-{case}'
            infile = test_dir / f"{case}.in"
            expfile = test_dir / f"{case}.out"
            if not infile.exists():
                click.secho(f"{infile.name} not found.", fg='yellow')
                return 1
        else:
            infile = test_dir / f"{case}.in"
            expfile = test_dir / f"{case}.out"
            if not infile.exists():
                click.secho(f"{infile.name} not found.", fg='yellow')
                return 1

        run_result = _test_case(config, solve_codefile, case, infile, expfile)
        print('[exec time]: {:.3f}'.format(run_result.exec_time), '[sec]')
        print('[used memory]: {:.3f}'.format(run_result.used_memory), '[MB]')
        print('')
    else:
        # random test
        solve_codefile = CodeFile(code_filename,
                                  exclude_filename_pattern=([case, by] if by else [case]))
def error(message):
    click.secho(message, bold=True, fg="red")
    sys.exit(-1)
def device_type():
    """ Get information about an attached device. """
    try:
        device_name, system_name, model, system_version = get_device_info()
    except frida.ProcessNotFoundError as e:
        click.secho('Could not connect with error: {0}'.format(str(e)), fg='red')
        print_frida_connection_help()
        return

    if state_connection.get_comms_type() == state_connection.TYPE_USB:
        click.secho('Connection: USB')
    elif state_connection.get_comms_type() == state_connection.TYPE_REMOTE:
        click.secho('Connection: Network')

    click.secho('Name: {0}'.format(device_name))
    click.secho('System: {0}'.format(system_name))
    click.secho('Model: {0}'.format(model))
    click.secho('Version: {0}'.format(system_version))
def warn(message):
    click.secho(message, fg="yellow", bold=True)
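# A hedged usage sketch for the warn()/error() helpers above; the messages
# are invented.
warn('config file is missing optional keys')  # yellow, execution continues
error('config file not found')                # red, then sys.exit(-1)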
def diff():
    """Git-like diff tool to compare sentences generated by our tokenizer
    vs the actual list of sentences."""
    click.secho("Loading corpus...")
    raw, sents = load_raw_corpus(False), load_sentence_corpus()
    y_true = [doc['sentences'] for doc in sents]
    y_pred = [Doc(doc).sents for doc in raw]
    paths = file_paths()
    for i in range(len(y_true)):
        if y_true[i] != y_pred[i]:
            click.secho(f"Document {paths[i]}")
            for s_true in y_true[i]:
                if s_true not in y_pred[i]:
                    click.secho(f"+ {s_true}", fg="green")
            click.secho()
            for s_pred in y_pred[i]:
                if s_pred not in y_true[i]:
                    click.secho(f"- {s_pred}", fg="red")
            click.secho()
            click.secho()
def cli(prob_id, filename):
    # get latest submission id, so a new submission will have a different id
    last_id, b, c, d, e = get_latest_verdict(config.username)

    # Browse to Codeforces
    browser = RoboBrowser(parser='html.parser')
    browser.open('http://codeforces.com/enter')

    enter_form = browser.get_form('enterForm')
    enter_form['handleOrEmail'] = config.username
    enter_form['password'] = config.password
    browser.submit_form(enter_form)

    try:
        checks = list(map(lambda x: x.getText()[1:].strip(),
                          browser.select('div.caption.titled')))
        if config.username not in checks:
            click.secho('Login Failed.. Wrong password.', fg='red')
            return
    except Exception as e:
        click.secho('Login Failed.. Maybe wrong id/password.', fg='red')
        return

    click.secho('[{0}] login successful!'.format(config.username), fg='green')
    click.secho('Submitting [{1}] for problem [{0}]'.format(prob_id, filename),
                fg='green')
    browser.open('http://codeforces.com/problemset/submit')

    submit_form = browser.get_form(class_='submit-form')
    submit_form['submittedProblemCode'] = prob_id
    try:
        submit_form['sourceFile'] = filename
    except Exception as e:
        click.secho('File {0} not found in current directory'.format(filename))
        return
    browser.submit_form(submit_form)

    if browser.url[-6:] != 'status':
        click.secho('Failed submission, probably you have submitted '
                    'the same file before', fg='red')
        return

    click.secho('[{0}] submitted ...'.format(filename), fg='green')
    hasStarted = False
    while True:
        id_, verdict_, time_, memory_, passedTestCount_ = get_latest_verdict(
            config.username)
        if id_ != last_id and verdict_ != 'TESTING' and verdict_ is not None:
            if verdict_ == 'OK':
                click.secho('OK - Passed {} tests'.format(passedTestCount_),
                            fg='green')
            else:
                click.secho("{} on test {}".format(verdict_, passedTestCount_ + 1),
                            fg='red')
            click.secho('{} MS | {} KB'.format(time_, memory_),
                        fg=('green' if verdict_ == 'OK' else 'red'))
            break
        elif verdict_ == 'TESTING' and (not hasStarted):
            click.secho("Judgment has begun", fg='green')
            hasStarted = True
        time.sleep(0.5)
def info():
    import q2cli.util
    # This import improves performance for repeated _echo_plugins
    import q2cli.core.cache

    click.secho('System versions', fg='green')
    _echo_version()
    click.secho('\nInstalled plugins', fg='green')
    _echo_plugins()

    click.secho('\nApplication config directory', fg='green')
    click.secho(q2cli.util.get_app_dir())

    click.secho('\nGetting help', fg='green')
    click.secho('To get help with QIIME 2, visit https://qiime2.org')
def repair_bugs(runtime, advisory, auto, id, original_state, new_state, noop,
                default_advisory_type):
    """Move bugs attached to the advisory from one state to another state.
    This is useful if the bugs have changed states *after* they were attached.

    Similar to `find-bugs`, but in reverse: `repair-bugs` begins by reading
    bugs from an advisory, whereas `find-bugs` reads from bugzilla.

    This looks at attached bugs in the provided --from state and moves them
    to the provided --to state.

\b
    Background: This is intended for bugs which went to MODIFIED, were
    attached to advisories, set to ON_QA, and then failed testing. When
    this happens their state is reset back to ASSIGNED.

    Using --use-default-advisory without a value set for the matching key
    in the build-data will cause an error and elliott will exit in a
    non-zero state. Most likely you will only want to use the `rpm` state,
    but that could change in the future. Use of this option conflicts with
    providing an advisory with the -a/--advisory option.

    Move bugs on 123456 FROM the MODIFIED state back TO ON_QA state:

\b
    $ elliott --group=openshift-4.1 repair-bugs --auto --advisory 123456 --from MODIFIED --to ON_QA

    As above, but using the default RPM advisory defined in ocp-build-data:

\b
    $ elliott --group=openshift-4.1 repair-bugs --auto --use-default-advisory rpm --from MODIFIED --to ON_QA

    The previous examples could also be run like this (MODIFIED and ON_QA are both defaults):

\b
    $ elliott --group=openshift-4.1 repair-bugs --auto --use-default-advisory rpm

    Bug ids may be given manually instead of using --auto:

\b
    $ elliott --group=openshift-4.1 repair-bugs --id 170899 --id 8675309 --use-default-advisory rpm
    """
    if auto and len(id) > 0:
        raise click.BadParameter(
            "Combining the automatic and manual bug modification options is not supported")

    if not auto and len(id) == 0:
        # No bugs were provided
        raise click.BadParameter(
            "If not using --auto then one or more --id's must be provided")

    if advisory and default_advisory_type:
        raise click.BadParameter(
            "Use only one of --use-default-advisory or --advisory")

    if len(id) == 0 and advisory is None and default_advisory_type is None:
        # Error: no bugs, advisory, or default selected
        raise click.BadParameter(
            "No input provided: Must use one of --id, --advisory, or --use-default-advisory")

    # Load bugzilla information and get a reference to the api
    runtime.initialize()
    bz_data = runtime.gitdata.load_data(key='bugzilla').data
    bzapi = elliottlib.bzutil.get_bzapi(bz_data)
    changed_bug_count = 0
    attached_bugs = []

    if default_advisory_type is not None:
        advisory = find_default_advisory(runtime, default_advisory_type)

    raw_bug_list = []
    if auto:
        click.echo("Fetching Erratum(errata_id={})".format(advisory))
        e = Erratum(errata_id=advisory)
        raw_bug_list = e.errata_bugs
    else:
        click.echo("Bypassed fetching erratum, using provided BZs")
        raw_bug_list = cli_opts.id_convert(id)

    green_print("Getting bugs for advisory")

    # Fetch bugs in parallel because it can be really slow doing it
    # one-by-one when you have hundreds of bugs
    pbar_header("Fetching data for {} bugs: ".format(len(raw_bug_list)),
                "Hold on a moment, we have to grab each one",
                raw_bug_list)
    pool = ThreadPool(cpu_count())
    click.secho("[", nl=False)

    attached_bugs = pool.map(
        lambda bug: progress_func(lambda: bzapi.getbug(bug), '*'),
        raw_bug_list)
    # Wait for results
    pool.close()
    pool.join()
    click.echo(']')

    green_print("Got bugs for advisory")
    for bug in attached_bugs:
        if bug.status in original_state:
            changed_bug_count += 1
            elliottlib.bzutil.set_state(bug, new_state, noop=noop)
    green_print("{} bugs successfully modified (or would have been)".format(
        changed_bug_count))
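# A minimal, self-contained sketch of the parallel fetch-with-progress
# pattern used above; `fetch` is a hypothetical stand-in for the slow
# per-bug bzapi.getbug call, and the '*' markers printed between the
# '[' and ']' brackets mirror the progress bar style shown above.
from multiprocessing import cpu_count
from multiprocessing.dummy import Pool as ThreadPool

import click

def fetch(bug_id):
    # placeholder for a slow per-item API call
    return {'id': bug_id}

def with_progress(func, char):
    result = func()
    click.secho(char, fg='green', nl=False)
    return result

pool = ThreadPool(cpu_count())
click.secho("[", nl=False)
results = pool.map(lambda b: with_progress(lambda: fetch(b), '*'), range(20))
pool.close()
pool.join()
click.echo(']')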
def err(message):
    click.secho(message, fg="red", bold=True)
    click.get_current_context().abort()
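# A minimal usage sketch for warn() and err() inside a click command;
# `do_work` is a hypothetical placeholder. Note that err() aborts the
# current click context, so it must be called while a command is running.
import click

@click.command()
def deploy():
    warn("No config file found; falling back to defaults")
    try:
        do_work()
    except RuntimeError as exc:
        err(str(exc))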
def patch_android_apk(source: str, architecture: str, pause: bool, skip_cleanup: bool = True,
                      enable_debug: bool = True, gadget_version: str = None, skip_resources: bool = False,
                      network_security_config: bool = False) -> None:
    """
        Patches an Android APK by extracting, patching SMALI, repackaging
        and signing a new APK.

        :param source:
        :param architecture:
        :param pause:
        :param skip_cleanup:
        :param enable_debug:
        :param gadget_version:
        :param skip_resources:
        :param network_security_config:

        :return:
    """
    github = Github(gadget_version=gadget_version)
    android_gadget = AndroidGadget(github)

    # without an architecture set, attempt to determine one using adb
    if not architecture:
        click.secho('No architecture specified. Determining it using `adb`...', dim=True)
        o = delegator.run('adb shell getprop ro.product.cpu.abi')

        # read the arch from the process' output
        architecture = o.out.strip()

        if len(architecture) <= 0:
            click.secho('Failed to determine architecture. Is the device connected and authorized?',
                        fg='red', bold=True)
            return

        click.secho('Detected target device architecture as: {0}'.format(architecture),
                    fg='green', bold=True)

    # set the architecture we are interested in
    android_gadget.set_architecture(architecture)

    # check if a gadget version was specified. if not, get the latest one.
    if gadget_version is not None:
        github_version = gadget_version
        click.secho('Using manually specified version: {0}'.format(gadget_version),
                    fg='green', bold=True)
    else:
        github_version = github.set_latest_version()
        click.secho('Using latest Github gadget version: {0}'.format(github_version),
                    fg='green', bold=True)

    # get the local version of the stored gadget
    local_version = android_gadget.get_local_version('android_' + architecture)

    # check if the local version needs updating. this can be because the
    # version is outdated, we simply don't have the gadget yet, or we want
    # a very specific version
    if parse_version(github_version) != parse_version(local_version) or not android_gadget.gadget_exists():
        # download!
        click.secho('Remote FridaGadget version is v{0}, local is v{1}. Downloading...'.format(
            github_version, local_version), fg='green')

        # download, unpack, update local version and cleanup the temp files.
        android_gadget.download() \
            .unpack() \
            .set_local_version('android_' + architecture, github_version) \
            .cleanup()

    click.secho('Patcher will be using Gadget version: {0}'.format(github_version), fg='green')

    patcher = AndroidPatcher(skip_cleanup=skip_cleanup)

    # ensure that we have all of the commandline requirements
    if not patcher.are_requirements_met():
        return

    # work on patching the APK
    patcher.set_apk_source(source=source)
    patcher.unpack_apk(skip_resources=skip_resources)
    patcher.inject_internet_permission(skip_resources=skip_resources)

    if enable_debug:
        patcher.flip_debug_flag_to_true()

    if network_security_config:
        patcher.add_network_security_config()

    patcher.inject_load_library()
    patcher.add_gadget_to_apk(architecture, android_gadget.get_frida_library_path())

    # if we are required to pause, do that.
    if pause:
        click.secho(('Patching paused. The next step is to rebuild the APK. '
                     'If you require any manual fixes, the current temp '
                     'directory is:'), bold=True)
        click.secho('{0}'.format(patcher.get_temp_working_directory()), fg='green', bold=True)
        input('Press ENTER to continue...')

    patcher.build_new_apk()
    patcher.sign_apk()
    patcher.zipalign_apk()

    # woohoo, get the APK!
    destination = source.replace('.apk', '.objection.apk')
    click.secho('Copying final apk from {0} to {1} in current directory...'.format(
        patcher.get_patched_apk_path(), destination))
    shutil.copyfile(patcher.get_patched_apk_path(),
                    os.path.join(os.path.abspath('.'), destination))
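# A minimal, hypothetical invocation sketch; in practice these arguments
# are supplied through the surrounding CLI's click options, and the path
# and architecture shown here are placeholders.
patch_android_apk(
    source='target.apk',        # APK to patch
    architecture='arm64-v8a',   # pass explicitly to skip adb detection
    pause=False,
    gadget_version=None,        # None selects the latest Github release
)
# The patched APK is written next to the source as target.objection.apk.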
def poll_signed(runtime, minutes, advisory, default_advisory_type, noop):
    """Poll for the signed-status of RPM builds attached to ADVISORY.

    Returns rc=0 when all builds have been signed. Returns non-0 after
    MINUTES have passed without all builds having been signed; this non-0
    return code is the number of unsigned builds remaining. All builds
    must show 'signed' for this command to succeed.

    NOTE: The two advisory options are mutually exclusive.

    For testing in pipeline scripts this sub-command accepts a --noop
    option. When --noop is used the value of --minutes is irrelevant. This
    command will print out the signed state of all attached builds and
    then exit with rc=0 if all builds are signed and non-0 if builds are
    still unsigned. In the non-0 case the return code is the number of
    unsigned builds.

    Wait 15 minutes for the default 4.2 advisory to show all RPMs have been signed:

    $ elliott -g openshift-4.2 poll-signed --use-default-advisory rpm

    Wait 5 minutes for the provided 4.2 advisory to show all RPMs have been signed:

    $ elliott -g openshift-4.2 poll-signed -m 5 --advisory 123456

    Print the signed status of all attached builds, exit immediately.
    Return code is the number of unsigned builds.

\b
    $ elliott -g openshift-4.2 poll-signed --noop --use-default-advisory rpm
    """
    if not (bool(advisory) ^ bool(default_advisory_type)):
        raise click.BadParameter(
            "Use only one of --use-default-advisory or --advisory")

    runtime.initialize(no_group=default_advisory_type is None)

    if default_advisory_type is not None:
        advisory = find_default_advisory(runtime, default_advisory_type)

    if not noop:
        click.echo("Polling up to {} minutes for all RPMs to be signed".format(minutes))

    try:
        e = Erratum(errata_id=advisory)
        all_builds = set()
        all_signed = False

        # `errata_builds` is a dict with brew tags as keys; values are
        # lists of builds on the advisory with that tag
        for k, v in e.errata_builds.items():
            all_builds = all_builds.union(set(v))

        green_prefix("Fetching initial states: ")
        click.echo("{} builds to check".format(len(all_builds)))
        start_time = datetime.datetime.now()

        while datetime.datetime.now() - start_time < datetime.timedelta(minutes=minutes):
            pbar_header("Getting build signatures: ",
                        "Should be pretty quick",
                        all_builds)
            pool = ThreadPool(cpu_count())
            # Look up builds concurrently
            click.secho("[", nl=False)

            build_sigs = pool.map(
                lambda build: progress_func(
                    lambda: elliottlib.errata.build_signed(build), '*'),
                all_builds)
            # Wait for results
            pool.close()
            pool.join()
            click.echo(']')

            if all(build_sigs):
                all_signed = True
                break
            elif noop:
                # Escape the time-loop
                break
            else:
                yellow_prefix("Not all builds signed: ")
                click.echo("re-checking")
                continue

        if not all_signed:
            red_prefix("Signing incomplete: ")
            if noop:
                click.echo("All builds not signed. ")
            else:
                click.echo("All builds not signed in given window ({} minutes). ".format(minutes))
            exit(1)
        else:
            green_prefix("All builds signed: ")
            click.echo("Enjoy!")
    except ErrataException as ex:
        raise ElliottFatalError(getattr(ex, 'message', repr(ex)))
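# A minimal sketch of the mutual-exclusion check above: boolean XOR is
# truthy only when exactly one of the two options is set, so both-empty
# and both-set are rejected alike. The values below are hypothetical.
for advisory, default_type in [(None, None), (123456, None), (None, 'rpm'), (123456, 'rpm')]:
    ok = bool(advisory) ^ bool(default_type)
    print(advisory, default_type, '->', 'ok' if ok else 'rejected')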