def provision_with_aws(configuration, operating_system, stage, name, discrete_ssh_config):
    """
    Provision a VM in the cloud using AWS EC2.

    :param configuration: Origin CI tool configuration
    :param operating_system: operating system used for the VM
    :param stage: image stage the VM was based off of
    :param name: name to give to the VM instance
    :param discrete_ssh_config: whether to update ~/.ssh/config or write a new file
    """
    if not configuration.aws_client_configuration.keypair_name:
        raise ClickException('No key-pair name found! Configure one using:\n $ oct configure aws-client keypair_name NAME')
    if not configuration.aws_client_configuration.private_key_path:
        raise ClickException(
            'No private key path found! Configure one using:\n $ oct configure aws-client private_key_path PATH'
        )

    configuration.run_playbook(
        playbook_relative_path='provision/aws-up',
        playbook_variables={
            'origin_ci_aws_hostname': configuration.next_available_vagrant_name,  # TODO: fix this
            'origin_ci_aws_ami_os': operating_system,
            'origin_ci_aws_ami_stage': stage,
            'origin_ci_aws_instance_name': name,
            'origin_ci_inventory_dir': configuration.ansible_client_configuration.host_list,
            'origin_ci_aws_keypair_name': configuration.aws_client_configuration.keypair_name,
            'origin_ci_aws_private_key_path': configuration.aws_client_configuration.private_key_path,
            'origin_ci_ssh_config_strategy': 'discrete' if discrete_ssh_config else 'update',
            'openshift_schedulable': True,
            'openshift_node_labels': {
                'region': 'infra',
                'zone': 'default',
            },
        },
    )

    if stage == Stage.bare:
        # once we have the new host, we must partition the space on it
        # that was set aside for Docker storage, then update the kernel
        # partition tables and set up the volume group backed by the LVM
        # pool
        configuration.run_playbook(playbook_relative_path='provision/aws-docker-storage')
def remove_account(self, id):
    if id in self._accounts:
        account = self._accounts[id]
        del self._accounts[id]
        # guard against self._active being None before comparing ids
        if self._active is not None and self._active.id == id:
            # fall back to any remaining account, or clear the active one
            if self._accounts:
                self._active = list(self._accounts.values())[0]
            else:
                self._active = None
        return account
    raise ClickException(f'The account identified by {id} does not exist.')
def bumpit(config, part, value, dry_run):
    try:
        run(
            config,
            ConsoleLogger(),
            run_settings=RunSettings(dry_run=dry_run, target_part=part, force_value=value),
        )
    except Exception as e:
        # ClickException expects a message; stringify and chain the cause
        raise ClickException(str(e)) from e
def test_click_exception_isnt_shadowed_by_runtime_error(monkeypatch):
    monkeypatch.setattr(
        cli.cli_module.examples, 'main',
        Mock(side_effect=ClickException('some click exception')))
    runner = CliRunner()
    result = runner.invoke(cli.cli, ['examples'])
    assert result.exit_code == 1
    assert result.output == 'Error: some click exception\n'
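# A hedged companion sketch (not part of the original suite): CliRunner
# catches unexpected exceptions too, but exposes them on result.exception
# instead of rendering them as "Error: ..." output. The patched target and
# command name mirror the test above and are assumptions.
def test_runtime_error_is_surfaced_on_result(monkeypatch):
    monkeypatch.setattr(
        cli.cli_module.examples, 'main',
        Mock(side_effect=RuntimeError('boom')))
    result = CliRunner().invoke(cli.cli, ['examples'])
    assert result.exit_code == 1
    assert isinstance(result.exception, RuntimeError)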
def wait_for_services_stable(cluster, region, ecs_client=None):
    ecs_client = ecs_client if ecs_client else _get_ecs_client(region)
    waiter = ecs_client.get_waiter('services_stable')
    non_daemon_services = _get_non_daemon_services(cluster, region, ecs_client)
    try:
        # the underlying ECS DescribeServices call accepts at most 10
        # services per request, so wait in chunks of 10
        for i in range(0, len(non_daemon_services), 10):
            waiter.wait(cluster=cluster, services=non_daemon_services[i:i + 10])
    except WaiterError as ex:
        raise ClickException(str(ex)) from ex
def __init__(self, path=None):
    super(AssemblaSpace, self).__init__(path)
    if not self.origin_url:
        raise ClickException(
            'Can\'t identify Assembla repo: no origin found')
    match = re.match(r'git@git\.assembla\.com:([^.]+)(\..*)?\.git',
                     self.origin_url)
    if not match:
        match = re.match(
            r'https://git\.assembla\.com/([^.]+)(\..*)?\.git',
            self.origin_url)
    if match is not None:
        self.name = match.group(1)
    else:
        raise ClickException('Not inside an Assembla git repo: ' +
                             self.origin_url)
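# Illustrative matches for the two patterns above (the space names are
# examples, not taken from the original source):
#   git@git.assembla.com:myspace.git           -> self.name == 'myspace'
#   https://git.assembla.com/myspace.repo.git  -> self.name == 'myspace'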
def user_delete(username):
    """Delete user by username"""
    user = User.query.get(username)
    if not user:
        raise ClickException(f'User "{username}" not found')
    db.session.delete(user)
    db.session.commit()
    echo(f'User "{username}" successfully deleted')
def app_create(context, filename, format):
    file_content = filename.read()
    if format == 'yaml':
        try:
            # safe_load: never execute arbitrary YAML tags from user input
            app = yaml.safe_load(file_content)
        except Exception as e:
            raise ClickException('Invalid YAML file.') from e
    else:
        try:
            app = json.loads(file_content)
        except Exception as e:
            raise ClickException('Invalid JSON file.') from e
    try:
        context.apps.validate_schema(app)
        app_id = context.apps.create(app)
        click.echo("Application creation OK - ID : {}".format(app_id))
    except Exception as e:
        raise ClickException(
            'Cannot create your application. API Exception.\n{}'.format(e)) from e
def release(dev, master, version, app_path='touchresume'):
    """Make Git release."""
    if not match(version, f'>{__version__}'):
        raise ClickException(f'Version must be greater than {__version__}')

    repo = Repo()
    release = f'release/{version}'

    echo(f'Create {release} branch')
    repo.head.ref = repo.heads[dev]
    repo.head.ref = repo.create_head(release)

    echo(f'Bump version - {version}')
    version_file = os.path.join(app_path, '__init__.py')
    with open(version_file, 'r+') as f:
        content = f.read()
        target = f"__version__ = '{__version__}'"
        value = f"__version__ = '{version}'"
        f.seek(0)
        f.write(content.replace(target, value))
    repo.index.add([version_file])
    repo.index.commit(f'bump version - v{version}')

    diff = repo.head.commit.diff(None)

    # fixed: '[s|log]' was a character class matching single letters,
    # not the alternation 'changes'/'changelog' that was intended
    cf = re.compile(r'^change(s|log).*')
    changelog_files = [d.a_path for d in diff if cf.match(d.a_path.lower())]
    if changelog_files:
        echo(f'Commit {", ".join(changelog_files)}')
        repo.index.add(changelog_files)
        repo.index.commit(f'update changelog - v{version}')

    rf = 'readme'
    readme_files = [d.a_path for d in diff if d.a_path.lower().startswith(rf)]
    if readme_files:
        echo(f'Commit {", ".join(readme_files)}')
        repo.index.add(readme_files)
        repo.index.commit(f'update readme - v{version}')

    echo(f'Merge {release} into {master}')
    repo.head.ref = repo.heads[master]
    parents = (repo.branches[release].commit, repo.branches[master].commit)
    repo.index.commit(f'merge {release}', parent_commits=parents)

    echo(f'Create v{version} tag')
    repo.create_tag(f'v{version}')

    echo(f'Merge {release} back into {dev}')
    repo.head.ref = repo.heads[dev]
    dev_parents = (repo.branches[release].commit, repo.branches[dev].commit)
    repo.index.commit(f'merge {release} back', parent_commits=dev_parents)

    echo(f'Delete {release} branch')
    repo.delete_head(release)
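# Hypothetical invocation (the option wiring, branch names, and version are
# illustrative assumptions, not taken from the original command definition):
#   $ touchresume release --dev develop --master master 1.2.0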
def startup(datasette):
    # Validate configuration
    config = datasette.plugin_config("datasette-graphql") or {}
    if "databases" in config:
        for database_name in config["databases"].keys():
            try:
                datasette.get_database(database_name)
            except KeyError:
                raise ClickException(
                    "datasette-graphql config error: '{}' is not a connected database".format(
                        database_name))
def _validate_worksheet_sheet(ws, worksheet):
    ws_type = get_ws_type_by_worksheet_name(worksheet)
    max_letter = get_col_limit_by_ws_type(ws_type)
    col_headers = get_col_headers_by_ws_type(ws_type)
    cells = ws['A1':f'{max_letter}1']
    for cell in cells[0]:
        if cell.value != col_headers[cell.column_letter]:
            raise ClickException(
                f'Invalid input file: column {cell.column_letter} '
                f'must be {col_headers[cell.column_letter]}',
            )
def install_robotpy():
    raise ClickException(
        inspect.cleandoc(
            """
            The install-robotpy command has been removed! The equivalent commands are now:

                robotpy-installer install-python
                robotpy-installer install robotpy

            Run "robotpy-installer --help" for details.
            """
        ))
def git_revision(rev: str):
    try:
        proc = git("rev-parse", "--verify", rev)
    except subprocess.CalledProcessError as err:
        raise ClickException(
            "Cannot interpret {!r} as a Git revision.\n[git] {}".format(
                rev, err.stderr.strip()
            )
        ) from err
    return proc.stdout.strip()
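# Usage sketch (assumes the `git` helper wraps subprocess.run with
# check=True and captured text output, returning a CompletedProcess):
#   sha = git_revision("HEAD")      # full commit hash of the current HEAD
#   git_revision("no-such-rev")     # -> ClickException carrying git's stderr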
def stop_service(cluster, service, region):
    ecs_client = _get_ecs_client(region)
    LOGGER.info('Stopping service: {}'.format(service))
    try:
        # scaling desiredCount to 0 stops the service's tasks
        ecs_client.update_service(
            cluster=cluster,
            service=service,
            desiredCount=0,
        )
    except ClientError as ex:
        raise ClickException(str(ex)) from ex
def validate_entity_attributes(self):
    if self.report_type == "ENTITY":
        if not all(
            attr in ENTITY_ATTRIBUTES[self.entity]
            for attr in self.entity_attributes
        ):
            raise ClickException(
                f"Available attributes for '{self.entity}' are: {ENTITY_ATTRIBUTES[self.entity]}"
            )
def task_definitions(ctx, family=None, status=None):
    """
    List of task definitions.
    """
    try:
        result = fetch_task_definitions(ctx.obj["ecs_client"], family, status)
    except NoResultsException as e:
        raise ClickException(str(e)) from e
    print(result.table)
def update_plugins(ignores: list[str], force: bool):
    """Downloads all plugins that are needed"""
    known_names = {config.name for config in load_plugin_configs()}
    for ignore in ignores:
        if ignore not in known_names:
            raise ClickException(f"Unknown plugin name: {ignore}")

    for config in load_plugin_configs():
        if config.name in ignores:
            print(f"Skipping {config}")
            continue
        print(f"Downloading {config}")
        for jar in config.jars:
            if len(config.jars) > 1:
                print(f" - Downloading {jar}")
            try:
                refresh = config.download_strategy.download(jar, force=force)
            except plugins.PluginError as e:
                raise ClickException(str(e)) from e
            if not refresh:
                print(f" - Already exists: {jar}")
def deployment_show(context, deployment_id):
    try:
        app = context.deployments.retrieve(deployment_id)
    except ApiClientException as e:
        raise ClickException(str(e)) from e
    click.echo(
        yaml.safe_dump(app, indent=4, allow_unicode=True,
                       default_flow_style=False))
def all(recid, depid):
    """Recreate all subformats."""
    if not recid and not depid:
        raise ClickException('Missing option "--recid" or "--depid"')
    id_value = recid or depid
    id_type = 'recid' if recid else 'depid'
    output, task_id = create_all_subformats(id_type=id_type, id_value=id_value)
    click.echo("Creating the following subformats: {0}. Task id: {1}".format(
        output, task_id))
def scrape(url, user, company, attribute, input_file, headless, output_file, driver):
    driver_options = {}
    if headless:
        driver_options = HEADLESS_OPTIONS
    if company:
        url = 'http://www.linkedin.com/company/' + company
    if user:
        url = 'http://www.linkedin.com/in/' + user

    if (url and input_file) or (not url and not input_file):
        raise ClickException(
            'Must pass either a url or file path, but not both.')
    elif url:
        if 'LI_AT' not in os.environ:
            raise ClickException("Must set LI_AT environment variable")
        driver_type = Firefox if driver == 'Firefox' else Chrome
        if company:
            with CompanyScraper(driver=driver_type,
                                cookie=os.environ['LI_AT'],
                                driver_options=driver_options) as scraper:
                profile = scraper.scrape(company=company)
        else:
            with ProfileScraper(driver=driver_type,
                                cookie=os.environ['LI_AT'],
                                driver_options=driver_options) as scraper:
                profile = scraper.scrape(url=url)
    else:
        with open(input_file, 'r') as html:
            profile = Profile(html)

    if attribute:
        output = getattr(profile, attribute)
    else:
        output = profile.to_dict()

    if output_file:
        with open(output_file, 'w') as outfile:
            json.dump(output, outfile)
    else:
        pprint(output)
def install(package, parallel):
    """
    Install one or more packages.
    """
    console.print(f"resolving packages: {package}")
    installers: Dict[str, Callable] = InstallerManager().get_installers()
    for pkg in package:
        if pkg not in installers:
            raise ClickException(
                f"unable to locate installer for package {pkg}")

    if parallel > 1:
        console.print(f"install {parallel} packages in parallel:")

    # collect installers and install in parallel:
    try:
        with Pool(processes=parallel) as pool:
            pool.map(_do_install, package)
    except Exception as e:
        raise ClickException("one or more package installations failed.") from e
def graph(input_file: str, output_file: str, fmt: str) -> None:
    """Generate dot image of graph from given input file."""
    input_file = Path(input_file)
    output_file = Path(output_file)
    try:
        graph = parse_file(input_file)
    except ParseError as ex:
        raise ClickException(f'{input_file}:{ex.line} {ex.message}') from ex
    else:
        graph.save(output_file, fmt=fmt)
def get_days_delta(day_range):
    delta_mapping = {
        "PREVIOUS_DAY": 1,
        "LAST_7_DAYS": 7,
        "LAST_30_DAYS": 30,
        "LAST_90_DAYS": 90,
    }
    try:
        days_delta = delta_mapping[day_range]
    except KeyError:
        raise ClickException(f"{day_range} is not handled by the reader")
    return days_delta
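# For example, get_days_delta("LAST_7_DAYS") returns 7, while an unsupported
# literal such as "YESTERDAY" (illustrative) raises
# ClickException('YESTERDAY is not handled by the reader').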
def wait_for_tasks_to_stop(cluster, tasks, timeout, region):
    ecs_client = _get_ecs_client(region)
    waiter = ecs_client.get_waiter('tasks_stopped')
    LOGGER.info('Waiting for tasks {} to stop.'.format(tasks))
    try:
        waiter.wait(
            cluster=cluster,
            tasks=tasks,
            WaiterConfig={
                'Delay': 1,
                'MaxAttempts': timeout,
            },
        )
    except ecs_client.exceptions.ClusterNotFoundException:
        raise ClickException("Cluster not found: '{}'".format(cluster))
    except (ClientError, WaiterError) as ex:
        raise ClickException(str(ex)) from ex
    LOGGER.info('All tasks stopped.')
def call_compose_command(command, quiet=False):
    if not quiet:
        LOGGER.info(command if isinstance(command, str) else ' '.join(command))
    compose_process = subprocess.Popen(command, stdout=sys.stdout,
                                       shell=isinstance(command, str))
    try:
        if compose_process.wait() != 0:
            raise ClickException('Command returned error')
    except KeyboardInterrupt:
        # first Ctrl-C: let compose shut down gracefully and wait for it;
        # a second Ctrl-C interrupts that wait, so wait once more for the
        # process to actually exit before propagating
        try:
            compose_process.wait()
        except KeyboardInterrupt:
            compose_process.wait()
def validate_ad_insights_breakdowns(self):
    if self.ad_insights:
        missing_breakdowns = {
            f[0]
            for f in self._field_paths
            if (f[0] in BREAKDOWNS) and (f[0] not in self.breakdowns)
        }
        if missing_breakdowns:
            raise ClickException(
                f"Wrong query. Please add to Breakdowns: {missing_breakdowns}"
            )
def find_pub_address_file(base_file: str) -> str:
    """
    Given a file name, which could point to a private or public key file,
    guess at the name of the public key file.
    """
    pub_addr_file = pub_address_file(base_file)
    if exists(pub_addr_file):
        return pub_addr_file
    if exists(base_file):
        return base_file
    raise ClickException(f"No public key file {pub_addr_file} or {base_file}")
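# Resolution-order sketch (assumes pub_address_file('vault.key') derives a
# name such as 'vault.key.pub'; the file names are illustrative):
#   find_pub_address_file('vault.key')  # 'vault.key.pub' if it exists,
#                                       # else 'vault.key', else ClickException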
def missing(recid, depid):
    """Create missing subformats given a record id or deposit id."""
    if not recid and not depid:
        raise ClickException('Missing option "--recid" or "--depid"')
    id_value = recid or depid
    id_type = 'recid' if recid else 'depid'
    output = create_all_missing_subformats(id_type=id_type, id_value=id_value)
    if output:
        click.echo("Creating the following subformats: {0}".format(output))
    else:
        click.echo("No missing format to create")
def get_commit_hash(cwd=None):
    """Get the latest commit hash of cwd."""
    ctx = click.get_current_context()
    r = delegator.run('git rev-parse HEAD', cwd=cwd)
    if r.return_code:
        raise ClickException(r.err)
    commit_hash = r.out.strip()
    if ctx.obj['debug']:
        click.echo(debug_log('get_commit_hash: %s', commit_hash))
    return commit_hash
def load_eth_address(eth_addr: Optional[str]) -> str:
    """
    Given an --eth-addr command line param, either parse the address, load
    from the file, or use a default file name.
    """
    eth_addr = eth_addr or ETH_ADDRESS_DEFAULT
    if eth_addr.startswith("0x"):
        return Web3.toChecksumAddress(eth_addr)
    if exists(eth_addr):
        with open(eth_addr, "r") as eth_addr_f:
            return Web3.toChecksumAddress(eth_addr_f.read().rstrip())
    raise ClickException(f"could not find file or parse eth address: {eth_addr}")
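# Usage sketch (the literal address and file name are illustrative;
# ETH_ADDRESS_DEFAULT is assumed to name a file containing a hex address):
#   load_eth_address("0x00000000219ab540356cBB839Cbe05303d7705Fa")  # parsed directly
#   load_eth_address("eth-address.txt")  # read from file if it exists
#   load_eth_address(None)               # falls back to ETH_ADDRESS_DEFAULT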