def common_fix_or_check(context, verbose: int, files, check_only: bool) -> None:
    """Common CLI code for both "fix" and "check" commands."""
    if verbose:
        # -v maps to INFO, -vv (or more) to DEBUG.
        chosen_level = logging.INFO if verbose == 1 else logging.DEBUG
        # https://loguru.readthedocs.io/en/stable/resources/recipes.html#changing-the-level-of-an-existing-handler
        # https://github.com/Delgan/loguru/issues/138#issuecomment-525594566
        logger.remove()
        logger.add(sys.stderr, level=logging.getLevelName(chosen_level))
    logger.enable(PROJECT_NAME)

    nitpick_app = get_nitpick(context)
    try:
        for violation in nitpick_app.run(*files, autofix=not check_only):
            nitpick_app.echo(violation.pretty)
    except QuitComplainingError as quit_err:
        for violation in quit_err.violations:
            click.echo(violation.pretty)
        raise Exit(2) from quit_err

    click.secho(Reporter.get_counts())
    if Reporter.manual or Reporter.fixed:
        raise Exit(1)
def invoke(self, ctx):
    """Invoke the click group, translating known exceptions into exit codes.

    Exit codes: 1 for missing/empty results, 3 for partial results,
    2 for internal/API/decoding failures and any unhandled exception.
    ``Exit`` and ``ClickException`` are re-raised untouched so click can
    handle them itself.
    """
    try:
        # Zero-argument super() is the Python 3 idiom; the explicit
        # super(ExceptionHandlingGroup, self) form was Python-2 style.
        return super().invoke(ctx)
    except (
            exceptions.NoResultsException,
            exceptions.NotFoundException,
    ) as e:
        logger.error(e)
        raise Exit(1)
    except (
            exceptions.PartialResultsException,
    ) as e:
        logger.error(e)
        raise Exit(3)
    except (
            exceptions.InternalFailureException,
            api_exceptions.PolyswarmException,
            exceptions.PolyswarmException,
            JSONDecodeError,
            UnicodeDecodeError,
    ) as e:
        logger.error(e)
        raise Exit(2)
    except (Exit, ClickException):
        raise
    except Exception as e:
        logger.exception(e)
        logger.error('Unhandled exception happened. Please contact support.')
        raise Exit(2)
def run(state, tasks):
    """
    Perform specified task(s) and all of its dependencies.
    When no task is given, the default (__DEFAULT__) task is run, if no
    default task has been defined, all available tasks are listed.
    """
    global _logger
    _logger = get_tasks_logger()

    if not tasks:
        # No explicit tasks: fall back to the default task, or list everything.
        default_task = state.module.default_task
        if default_task is None:
            list_tasks(state.module)
        else:
            _run_tasks([(default_task, [], {})])
        return

    # Validate and resolve the user-provided task strings before running.
    try:
        tasks_to_run = _prepare_tasks_to_run(state.module, tasks)
    except (TaskNotFoundError, MalformedTaskArgumentError) as exc:
        logger.error(str(exc))
        raise Exit(1)
    _run_tasks(tasks=tasks_to_run)
def subscribe(ytcc: core.Ytcc, name: str, url: str):
    """Subscribe to a playlist.

    The NAME argument is the name used to refer to the playlist. The URL
    argument is the URL to a playlist that is supported by youtube-dl.
    """
    try:
        ytcc.add_playlist(name, url)
    except BadURLException as bad_url:
        # URL could not be resolved to a supported playlist.
        logger.error("The given URL does not point to a playlist or is not supported by "
                     "youtube-dl")
        raise Exit(1) from bad_url
    except NameConflictError as name_conflict:
        # Either the name or the playlist itself is already registered.
        logger.error("The given name is already used for another playlist "
                     "or the playlist is already subscribed")
        raise Exit(1) from name_conflict
def main(ctx, **kwargs):
    """
    \b
      ,-~~-.___.
     / |  '     \\
    (  )         0
     \\_/-, ,----'
        ====           //
       /  \\-'~;    /~~~(O)
      /  __/~|   /       |
    =(  _____| (_________|
    """
    # Work on a copy so the original kwargs (minus nothing) can be stashed
    # in the click context below.
    algo_args = kwargs.copy()
    algorithm = algo_args.pop('algorithm')
    algo_args.pop('benchmark')  # not an algorithm constructor argument

    if algorithm == 'csidh':
        from sibc.csidh import CSIDH
        algo = CSIDH(**algo_args)
    elif algorithm == 'bsidh':
        from sibc.bsidh import BSIDH
        # BSIDH takes neither a style nor an exponent.
        algo_args.pop('style')
        algo_args.pop('exponent')
        algo = BSIDH(**algo_args)
    else:
        click.echo('algorithm not implemented')
        raise Exit(1)

    kwargs['algo'] = algo
    ctx.meta['sibc.kwargs'] = attrdict(kwargs)
def init(no_backend, args):
    """
    Initialize this layer.
    """
    # Either disable the backend entirely or point Terraform at the
    # project's backend tfvars file.
    if no_backend:
        backend_flag = "-backend=false"
    else:
        backend_flag = f"-backend-config={BACKEND_TFVARS}"

    exit_code, _ = run(command="init", args=[backend_flag, *args])
    if exit_code:
        raise Exit(exit_code)
def login(
    username: str,
    password: str,
    token: Optional[str],
    host: Optional[str],
    yes: bool,
    verify_ssl: bool,
) -> None:
    """Log in into Valohai."""
    if settings.user and settings.token:
        # Already logged in: confirm re-login unless --yes was passed.
        known_user = settings.user['username']
        known_host = settings.host
        if yes:
            info(
                f'--yes set: ignoring pre-existing login for {known_user} on {known_host}'
            )
        else:
            click.confirm((
                f'You are already logged in as {known_user} on {known_host}.\n'
                'Are you sure you wish to acquire a new token?'), abort=True)

    if not (token or username or password or host):
        # Don't show the banner if this seems like a non-interactive login.
        click.secho(f'Welcome to Valohai CLI {__version__}!', bold=True)

    host = validate_host(host)
    if token:
        if username or password:
            error('Token is mutually exclusive with username/password')
            raise Exit(1)
        click.echo(f'Using token {token[:5]}... to log in.')
    else:
        token = do_user_pass_login(
            host=host,
            username=username,
            password=password,
            verify_ssl=verify_ssl,
        )

    click.echo(f'Verifying API token on {host}...')
    with APISession(host, token, verify_ssl=verify_ssl) as sess:
        user_data = sess.get('/api/v0/users/me/').json()

    settings.persistence.update(
        host=host,
        user=user_data,
        token=token,
        verify_ssl=verify_ssl,
    )
    settings.persistence.save()
    success(f"Logged in. Hey {user_data.get('username', 'there')}!")
    if not verify_ssl:
        warn(
            "SSL verification is off. This may leave you vulnerable to man-in-the-middle attacks."
        )
def failure(message, exit=False):
    """Prints a failure message to the terminal.

    Args:
        message (str): Text shown after the cross-mark emoji.
        exit (bool): When True, abort the CLI with exit code 1 after printing.
    """
    # Bug fix: ``click.echo``'s ``color`` parameter is a *boolean* that forces
    # color handling on/off — passing "red" never colorized the output.
    # ``click.secho(fg="red")`` actually prints the message in red.
    click.secho(
        emoji.emojize(":heavy_multiplication_x: %s" % message, use_aliases=True),
        fg="red",
        err=True,
    )
    if exit:
        raise Exit(1)
def terraform():
    """ Run Terraform commands in a custom containerized environment that provides extra functionality
    when interacting with your cloud provider such as handling multi factor authentication for you.
    All terraform subcommands that receive extra args will pass the given strings as is to their corresponding
    Terraform counterparts in the container. For example as in `leverage terraform apply -auto-approve`
    or `leverage terraform init -reconfigure`
    """
    # Refuse to run outside a Leverage project: every one of these path
    # constants must be resolved.
    required = (ROOT, CONFIG, ACCOUNT, ACCOUNT_CONFIG)
    if not all(required):
        logger.error("Not running in a Leverage project. Exiting.")
        raise Exit(1)
def cmd_constraint_project_set_up(ctx):
    """Ensure a project is loaded AND set up; otherwise tell the user to run setup and abort."""
    cmd_constraint_project_loaded(ctx)
    if ctx.project_is_set_up:
        return
    hint = (
        style(
            "Thanks for using Riptide! You seem to be working with a new project.\n"
            "Please run the ", fg='yellow')
        + style("setup", bold=True, fg='yellow')
        + style(" command first.", fg='yellow')
    )
    echo(hint)
    raise Exit(1)
def rename(ytcc: core.Ytcc, old: str, new: str):
    """Rename a playlist.

    Renames the playlist OLD to NEW.
    """
    try:
        ytcc.rename_playlist(old, new)
    except NameConflictError as nce:
        # NEW already names another playlist.
        logger.error("'%s'", str(nce))
        raise Exit(1) from nce
def unsubscribe(ytcc: core.Ytcc, name: str):
    """Unsubscribe from a playlist.

    Unsubscribes from the playlist identified by NAME.
    """
    try:
        ytcc.delete_playlist(name)
    except PlaylistDoesNotExistException as err:
        logger.error("Playlist '%s' does not exist", name)
        raise Exit(1) from err
    # Only reached when the deletion succeeded.
    logger.info("Unsubscribed from %s", name)
def _run(task, completed_tasks, *args, **kwargs): """ Run the given task and all it's required dependencies, keeping track of all the already completed tasks as not to repeat them. Args: task (list): Tasks to run. completed_tasks (set): Tasks that have already ran. Returns: set: Updated set of already executed tasks. """ # Satisfy dependencies recursively. for dependency in task.dependencies: _completed_tasks = _run(dependency, completed_tasks) completed_tasks.update(_completed_tasks) if task not in completed_tasks: if task.is_ignored: _logger.info( f"[bold yellow]⤳[/bold yellow] Ignoring task [bold italic]{task.name}[/bold italic]" ) else: _logger.info( f"[bold yellow]➜[/bold yellow] Starting task [bold italic]{task.name}[/bold italic]" ) try: task(*args, **kwargs) except Exception as exc: # Remove the two topmost frames of the traceback since they are internal leverage function calls, # only frames pertaining to the build script and its dependencies are shown. exc.__traceback__ = exc.__traceback__.tb_next.tb_next exc = clean_exception_traceback(exception=exc) _logger.exception( f"[bold red]![/bold red] Error in task [bold italic]{task.name}[/bold italic]", exc_info=exc) _logger.critical( "[red]✘[/red] [bold on red]Aborting build[/bold on red]") raise Exit(1) _logger.info( f"[green]✔[/green] Completed task [bold italic]{task.name}[/bold italic]" ) completed_tasks.add(task) return completed_tasks
def configure_accounts_profiles(profile, region, organization_accounts, project_accounts):
    """
    Set up the required profiles for all accounts to be used with AWS cli. Backup previous profiles.

    Args:
        profile(str): Name of the profile to configure.
        region (str): Region.
        organization_accounts (dict): Name and id of all accounts in the organization.
        project_accounts (dict): Name and email of all accounts in project configuration file.
    """
    short_name, profile_type = profile.split("-")

    mfa_serial = ""
    if PROFILES[profile_type]["mfa"]:
        logger.info("Fetching MFA device serial.")
        mfa_serial = _get_mfa_serial(profile)
        if not mfa_serial:
            logger.error(
                "No MFA device found for user. Please set up a device before configuring the accounts profiles."
            )
            raise Exit(1)

    account_profiles = {}
    for account in project_accounts:
        account_name = account["name"]
        # DevOps roles do not have permission over management account
        if "security" in profile and account_name == "management":
            continue
        # TODO: Add remaining profiles for remaining accounts declared in code if enough information is available
        account_id = organization_accounts.get(account_name, account.get("id"))
        if account_id is None:
            continue
        role_name = PROFILES[profile_type]["role"]
        role_suffix = PROFILES[profile_type]["profile_role"]
        # A profile identifier looks like `le-security-oaar`
        identifier = f"{short_name}-{account_name}-{role_suffix}"
        account_profiles[identifier] = {
            "output": "json",
            "region": region,
            "role_arn": f"arn:aws:iam::{account_id}:role/{role_name}",
            "source_profile": profile,
            "mfa_serial": mfa_serial,
        }

    logger.info("Backing up account profiles file.")
    _backup_file("config")

    for identifier, values in account_profiles.items():
        configure_profile(identifier, values)
def _extract_credentials(file):
    """
    Extract AWS credentials from given file. Print message and quit application if file is malformed.
    Access Keys files have the form:
       Access key ID,Secret access key
       AKUDKXXXXXXXXXXXXXXX,examplesecreteLkyvWWjxi29dJ63Geo1Ggl956b

    Args:
        file (Path): Credentials file as obtained from AWS Console.

    Raises:
        Exit: When file content does not conform to expected form.

    Returns:
        str, str: Key ID, Secret Key
    """
    with open(file) as access_keys_file:
        # The CSV is expected to contain a header row plus one data row;
        # DictReader keys the data row by the header fields.
        try:
            keys = next(csv.DictReader(access_keys_file))
        except csv.Error:
            click.echo("\nMalformed access keys file\n")
            raise Exit(1)

    try:
        access_key_id = keys["Access key ID"]
        secret_access_key = keys["Secret access key"]
    except KeyError:
        click.echo("\nFields for keys not found in access keys file\n")
        raise Exit(1)

    # Validate both values against their expected formats.
    id_ok = re.match(KEY_ID, access_key_id)
    secret_ok = re.match(SECRET_KEY, secret_access_key)
    if not id_ok or not secret_ok:
        click.echo("\nMalformed keys in access keys file\n")
        raise Exit(1)

    return access_key_id, secret_access_key
def init(context, style_urls):
    """Create a [tool.nitpick] section in the configuration file if it doesn't exist already."""
    nit = get_nitpick(context)
    config = nit.project.read_configuration()

    # Bail out if the config file already carries a [tool.nitpick] section.
    existing_tool_table = (
        TomlDoc(path=config.file).as_object.get(TOOL_KEY, {}) if config.file else {}
    )
    if config.file and PROJECT_NAME in existing_tool_table:
        click.secho(
            f"The config file {config.file.name} already has a [{TOOL_NITPICK_KEY}] section.",
            fg="yellow")
        raise Exit(1)

    nit.project.create_configuration(config, *style_urls)
    click.secho(
        f"A [{TOOL_NITPICK_KEY}] section was created in the config file: {config.file.name}",
        fg="green")
def run(local_command: LocalCommand, args: [str], exit_on_error=True) -> None:
    """
    if the excecution process is finishing with an exit code of 0
    There is one or two exception as the execution of migration by alembic through honcho.
    exit_on_error allow to manage them
    :param exit_on_error: break the flow if the exit code is different of 0
    """
    try:
        complete_command = local_command[args]
        # Log the exact command and the directory it will run in.
        logger().debug(f'{complete_command} - wd: {os.getcwd()}')
        # Run in the foreground, streaming output to the terminal.
        complete_command & FG
    except ProcessExecutionError as exception:
        if not exit_on_error:
            return
        raise Exit(code=exception.retcode) from exception
def _prepare_tasks_to_run(module, input_tasks):
    """
    Validate input tasks and arguments and pair them with the corresponding module's task.

    Args:
        module (dict): Dict containing the tasks from the build script.
        input_tasks (list): Strings containing the tasks to invoke and their arguments
            as received from user input.

    Raises:
        MalformedTaskArgumentError: When the string representing the invocation of a task
            does not conform to the required pattern.
        TaskNotFoundError: When the specified task is not found in the ones defined
            in the build script.

    Returns:
        list(tuple): List of tasks paired with their corresponding args and kwargs
            as provided by the user.
    """
    prepared = []
    for input_task in input_tasks:
        match = _TASK_PATTERN.match(input_task)
        if not match:
            raise MalformedTaskArgumentError(
                f"Malformed task argument in `{input_task}`.")

        name = match.group("name")
        arguments = match.group("arguments")
        try:
            args, kwargs = parse_task_args(arguments=arguments)
        except (InvalidArgumentOrderError, DuplicateKeywordArgumentError) as exc:
            logger.error(str(exc).format(task=name))
            raise Exit(1)

        # Look up the named task among those the build script defines.
        candidates = [task for task in module.tasks if task.name == name]
        if not candidates:
            raise TaskNotFoundError(f"Unrecognized task `{name}`.")
        prepared.append((candidates[0], args, kwargs))

    return prepared
def _load_build_script(build_script):
    """
    Load build script as module and return the useful bits.
    If build script is malformed the exception trace is printed and the application exits.

    Args:
        build_script (str): Path to the file containing the definition of the tasks.

    Raises:
        Exit: With code 1 when importing the build script fails.

    Returns:
        Module: Name, tasks and default tasks of the module.

    NOTE(review): appends to ``sys.path`` as a side effect and never removes
    the entry afterwards.
    """
    build_script = Path(build_script)
    # Treat the folder in which the script is as a package to allow
    # relative imports
    package = build_script.parent
    sys.path.append(package.parent.as_posix())
    # Package needs to be imported first
    importlib.import_module(package.name)
    # Load build script as module
    try:
        module = importlib.import_module(f".{build_script.stem}",
                                         package=package.name)
    except (ImportError, ModuleNotFoundError, SyntaxError) as exc:
        # Remove frames in the traceback until we reach the one pertaining to the build
        # script, as to avoid polluting the output with internal leverage calls,
        # only frames of the build script and its dependencies are shown.
        build_script = build_script.as_posix()
        while (exc.__traceback__ is not None
               and exc.__traceback__.tb_frame.f_code.co_filename != build_script):
            exc.__traceback__ = exc.__traceback__.tb_next
        exc = clean_exception_traceback(exception=exc)
        logger.exception("Error in build script.", exc_info=exc)
        raise Exit(1)
    return Module(name=Path(module.__file__).name,
                  tasks=_get_tasks(module=module),
                  default_task=getattr(module, "__DEFAULT__", None))
def load_project_config():
    """
    Load project configuration file.

    Raises:
        Exit: For any error produced during configuration loading.

    Returns:
        dict: Project configuration.
    """
    if not PROJECT_CONFIG.exists():
        logger.debug("No project config file found.")
        return {}

    try:
        return YAML().load(PROJECT_CONFIG)
    except Exception as exc:
        # Drop the traceback so only the error itself is reported.
        exc.__traceback__ = None
        logger.exception(message="Error loading configuration file.",
                         exc_info=exc)
        raise Exit(1)
def ls(context, files):  # pylint: disable=invalid-name
    """List of files configured in the Nitpick style.

    Display existing files in green and absent files in red.
    You can use partial and multiple file names in the FILES argument.
    """
    nitpick_app = get_nitpick(context)
    try:
        pending = list(nitpick_app.project.merge_styles(nitpick_app.offline))
        exit_code = 1
    except QuitComplainingError as err:
        pending = err.violations
        exit_code = 2

    if pending:
        for violation in pending:
            click.echo(violation.pretty)
        raise Exit(exit_code)

    # TODO: test: ls with invalid style
    # TODO: test: configured_files() API
    for path in nitpick_app.configured_files(*files):
        color = "green" if path.exists() else "red"
        click.secho(relative_to_current_dir(path), fg=color)
def lint(xcodeproj_path, target_name, swiftlint_cfg_path, delete_config):
    """Run swiftlint over the given Xcode target, generating .swiftlint.yml on the fly."""
    # Load the base SwiftLint configuration and restrict it to the
    # Swift files belonging to the requested target.
    with open(swiftlint_cfg_path, 'r') as sources:
        sources_config = yaml.safe_load(sources)
    sources_config['included'] = list_swift_files(xcodeproj_path, target_name)
    with open('.swiftlint.yml', 'w') as output:
        yaml.dump(sources_config,
                  output,
                  default_flow_style=False,
                  allow_unicode=True)

    process = run(["swiftlint"])

    if delete_config:
        # Best-effort cleanup of the generated configuration.
        try:
            os.remove('.swiftlint.yml')
        except OSError:
            click.echo(
                'Cleanup failed, could not remove .swiftlint.yml file',
                err=True)

    if process.returncode != 0:
        raise Exit(process.returncode)
def abort_cli(msg="Fin du programme."):
    """Print *msg* in bold red and abort the CLI with exit code 1."""
    click.secho(msg, bold=True, fg="red")
    raise Exit(1)
def main(prime, algorithm):
    """ Computing and storing sdacs """
    # Fix: the docstring above was originally a stray string placed *after*
    # the assert (so it was never a docstring), and the assert compared with
    # `!= None` instead of `is not None` (PEP 8). Note asserts are stripped
    # under `python -O`.
    assert prime is not None, 'argument --prime is required'

    # Fix: the original opened the file with open()/f.close() and leaked the
    # handle on the unknown-algorithm path; a context manager closes it on
    # every exit path.
    with open(resource_filename('sibc', 'data/sop/' + prime)) as f:
        if algorithm == 'csidh':
            # CSIDH only requires the factorization of p + 1
            L = f.readline()
            # The first value in L corresponds with the cofactor h of (p+1),
            # which is not required here
            L = [int(l) for l in L.split()][1:]
            n = len(L)
        elif algorithm == 'bsidh':
            # B-SIDH only requires the factorization of p + 1 and p - 1
            # The prime to be used
            p = f.readline()
            p = int(p, 16)
            # List corresponding (p + 1)
            Lp = f.readline()
            Lp = [int(lp) for lp in Lp.split()]
            # exponent_of_twop = Lp[0]
            # Lp = Lp[1:]
            Ep = f.readline()
            Ep = [int(ep) for ep in Ep.split()]
            assert len(Ep) == len(Lp)
            np = len(Lp)
            # List corresponding (p - 1)
            Lm = f.readline()
            Lm = [int(lm) for lm in Lm.split()]
            Em = f.readline()
            Em = [int(em) for em in Em.split()]
            assert len(Em) == len(Lm)
            nm = len(Lm)
            L = list(Lp + Lm)
            n = len(L)
        else:
            click.echo("only csidh and bsidh are currently implemented")
            raise Exit(1)

    def dacs(l, r0, r1, r2, chain):
        """
        dacs()
        inputs: a small odd prime number l, three integer numbers, and a list
        output: all the differential additions chains corresponding with the input l

        NOTE: this is a recursive approach
        """
        if r2 == l:
            return [(chain, r2)]
        elif r2 < l and len(chain) <= 1.5 * math.log(l, 2):
            return dacs(l, r0, r2, r2 + r0, chain + [1]) + dacs(
                l, r1, r2, r2 + r1, chain + [0])
        else:
            return []

    def sdac(l):
        """
        sdac()
        input: a small odd prime number l
        output: the shortest differential additions chain corresponding with the input l

        NOTE: this function uses a recursive function
        """
        all_dacs = dacs(l, 1, 2, 3, [])
        return min(all_dacs, key=lambda t: len(t[0]))[0]

    def generate_sdacs(L):
        """ Shortest Differential Addition Chains for each small odd prime l in L """
        return list(map(sdac, L))

    # Shortest Differential Addition Chains (SDACs) for each l_i
    path = resource_filename('sibc', "data/sdacs/" + prime)
    print("// Computing sdacs")
    SDACS = generate_sdacs(L)
    print("// Storing sdacs into a file")
    write_list_of_lists_of_ints_to_file(path, SDACS)
def cli_exit(code: int = 128) -> NoReturn:
    """Abort the CLI by raising ``Exit`` with the given *code* (default 128)."""
    raise Exit(code)
def error(*args, **kwargs):
    """Echo the given message to stderr, then abort with exit code 1."""
    click.echo(*args, err=True, **kwargs)
    raise Exit(1)
def handle_keyboard_interrupt(*args, **kwargs):
    """Ask a question; abort with exit code 1 when it is cancelled (answer is None)."""
    answer = question(*args, **kwargs)
    if answer is None:
        raise Exit(1)
    return answer
def failure(message, exit=False):
    """Prints a failure message to the terminal.

    Args:
        message (str): Text shown after the cross mark.
        exit (bool): When True, abort the CLI with exit code 1 after printing.
    """
    # Bug fix: ``click.echo``'s ``color`` parameter is a *boolean* that forces
    # color handling on/off — passing "red" never colorized the output.
    # ``click.secho(fg="red")`` actually prints the mark in red.
    click.secho("✖️ ", fg="red", err=True, nl=False)
    click.echo(message, err=True)
    if exit:
        raise Exit(1)
def success_cli(msg="Terminé."):
    """Print *msg* in bold green and terminate the CLI with exit code 0."""
    click.secho(msg, bold=True, fg="green")
    raise Exit(0)
def login(username: str, password: str, token: Optional[str], host: Optional[str], yes: bool) -> None:
    """Log in into Valohai."""
    # Resolve the host: command option first, then the top-level CLI
    # override (or envvar), then the global default.
    host = (host
            or settings.overrides.get('host')
            or default_app_host)

    if settings.user and settings.token:
        current_username = settings.user['username']
        if yes:
            info(f'--yes set: ignoring pre-existing login for {current_username}')
        else:
            message = (
                'You are already logged in as {username}.\n'
                'Are you sure you wish to acquire a new token?'
            ).format(username=current_username)
            click.confirm(message, abort=True)

    if token:
        if username or password:
            error('Token is mutually exclusive with username/password')
            raise Exit(1)
        click.echo(f'Using token {token[:5]}... to log in.')
    else:
        # Interactive username/password flow.
        if not (username or password):
            click.secho(f'Welcome to Valohai CLI {__version__}!', bold=True)
            click.echo(f'\nIf you don\'t yet have an account, please create one at {host} first.\n')
        if username:
            click.echo(f'Username: {username}')
        else:
            username = click.prompt('Username').strip()
        if not password:
            password = click.prompt('Password', hide_input=True)
        click.echo('Retrieving API token...')
        with APISession(host) as sess:
            try:
                token_data = sess.post('/api/v0/get-token/', data={
                    'username': username,
                    'password': password,
                }).json()
                token = token_data['token']
            except APIError as ae:
                code = ae.code
                if code in ('has_external_identity', 'has_2fa'):
                    # Point the user at the token-based login path.
                    command = 'vh login --token TOKEN_HERE '
                    if host != default_app_host:
                        command += f'--host {host}'
                    banner(TOKEN_LOGIN_HELP.format(code=code, host=host, command=command))
                raise

    click.echo('Verifying API token...')
    with APISession(host, token) as sess:
        user_data = sess.get('/api/v0/users/me/').json()
    settings.persistence.update(host=host, user=user_data, token=token)
    settings.persistence.save()
    success(f"Logged in. Hey {user_data.get('username', 'there')}!")