def delete(argorithm_id: str = typer.Argument(..., help="argorithmID of function to be deleted.")):
    """Deletes argorithm from server. More info at https://argorithm.github.io/toolkit/cli#delete """
    # Resolve the argorithm first so we confirm against a record that exists.
    params = search(argorithm_id)
    if not typer.confirm("Are you sure you want to delete it?"):
        typer.echo("Not deleting")
        raise typer.Abort()
    header = authmanager.get_header()
    payload = {"argorithmID": params["argorithmID"]}
    endpoint = app_settings.get_endpoint() + "/argorithms/delete"
    try:
        # Spinner only wraps the network call.
        with Halo(text='Connecting', spinner='dots'):
            response = requests.post(endpoint, json=payload, headers=header)
    except requests.RequestException as exc:
        msg.fail("Connection failed", str(exc))
        raise typer.Abort()
    # Map the server's status code to user feedback.
    if response.status_code == 200:
        msg.info("Deleted successfully")
    elif response.status_code == 401:
        msg.warn("Not authorized", "only author of argorithm or admin can delete it")
    else:
        msg.fail("application error")
def dub(username: str = username, password: str = password, servername: str = servername, show: str = show, lang: str = lang):
    """Set the default dubs for every episode of the show."""
    account = MyPlexAccount(username, password)
    plex: PlexServer = account.resource(servername).connect()
    try:
        missing_count = 0
        for part in _get_media_parts(plex, show):
            # First audio stream matching the requested language wins.
            match = next((a for a in part.audioStreams() if a.languageCode == lang), None)
            if match is not None:
                part.setDefaultAudioStream(match)
            else:
                missing_count += 1
                if GLOBAL_OPTIONS['verbose']:
                    typer.echo(
                        f'Audio for "{lang}" not found for file: {part.file}',
                        err=True)
        # Abort (non-zero exit) when at least one part had no matching dub.
        if missing_count != 0:
            typer.echo(f'{missing_count} dubs were not found', err=True)
            raise typer.Abort()
    except NotFound as e:
        typer.echo("Show, media item, or device is not found.", err=True)
        typer.echo(e, err=True)
        raise typer.Abort()
    typer.echo('Success!')
def delete(path: str, force: bool = False) -> None:
    """
    Delete a Quetz deployment.

    Parameters
    ----------
    path : str
        The path of the deployment
    force : bool, optional
        Whether to skip manual confirmation, or not {default=False}

    Raises
    ------
    typer.Abort
        If no deployment exists at `path` or the user declines confirmation.
    """
    abs_path = os.path.abspath(path)
    deployments = _get_deployments()
    # EAFP: a missing key means there is no deployment registered at this path.
    try:
        _ = deployments[abs_path]
    except KeyError:
        # Fixed typo in user-facing message: "deployement" -> "deployment".
        typer.echo('No Quetz deployment found at {}.'.format(path))
        raise typer.Abort()
    # Renamed local (was `delete`), which shadowed this function's own name.
    confirmed = force or typer.confirm(
        "Delete Quetz deployment at {}?".format(path))
    if not confirmed:
        raise typer.Abort()
    shutil.rmtree(abs_path)
    _clean_deployments()
    # Annotation fixed: the function returns normally on success, so
    # `NoReturn` was incorrect; it is `None`.
def bootstrap(
    host: str = typer.Argument(
        None, help="the hostname of the configuration to build"),
    nixos: bool = False,
    darwin: bool = False,
    home_manager: bool = False,
):
    """Bootstrap a machine from the flake output selected by the given flags."""
    cfg = select(nixos=nixos, darwin=darwin, home_manager=home_manager)
    flags = "-v --experimental-features 'nix-command flakes'"
    if cfg is None:
        return
    elif cfg == FlakeOutputs.NIXOS:
        # Fixed typo in user-facing message: "boostrap" -> "bootstrap".
        typer.secho(
            "bootstrap does not apply to nixos systems.",
            fg=Colors.ERROR.value,
        )
        raise typer.Abort()
    elif cfg == FlakeOutputs.DARWIN:
        diskSetup()
        # Bug fix: `flags` was embedded inside the flake attribute string AND
        # appended again in the `nix build` command, so the flags were passed
        # twice and polluted the installable reference.
        flake = f".#{cfg.value}.{host}.config.system.build.toplevel"
        run_cmd(f"nix build {flake} {flags}")
        run_cmd("./result/activate-user && ./result/activate")
    elif cfg == FlakeOutputs.HOME_MANAGER:
        flake = f".#{FlakeOutputs.HOME_MANAGER.value}.{host}.activationPackage"
        run_cmd(f"nix build {flake} {flags}")
        run_cmd("./result/activate")
    else:
        typer.secho("could not infer system type.", fg=Colors.ERROR.value)
        raise typer.Abort()
def switch(
    host: str = typer.Argument(
        default=None, help="the hostname of the configuration to build"),
    nixos: bool = False,
    darwin: bool = False,
    home_manager: bool = False,
):
    """Rebuild and switch to the flake configuration for `host`."""
    if not host:
        typer.secho("Error: host configuration not specified.",
                    fg=Colors.ERROR.value)
        raise typer.Abort()
    cfg = select(nixos=nixos, darwin=darwin, home_manager=home_manager)
    if cfg is None:
        return
    # Dispatch table instead of an if/elif chain; same commands as before.
    rebuild_commands = {
        FlakeOutputs.NIXOS: "sudo nixos-rebuild switch --flake",
        FlakeOutputs.DARWIN: "darwin-rebuild switch --flake",
        FlakeOutputs.HOME_MANAGER: "home-manager switch --flake",
    }
    cmd = rebuild_commands.get(cfg)
    if cmd is None:
        typer.secho("could not infer system type.", fg=Colors.ERROR.value)
        raise typer.Abort()
    flake = f".#{host}"
    flags = " ".join(["--show-trace"])
    run_cmd(f"{cmd} {flake} {flags}")
def search(query: str, cache: bool = True, kind: str = "table"):
    """search for videos with query on youtube.

    Parameters: `query` search terms, `cache` reuse cached results,
    `kind` output format ("json" or "table").
    Raises typer.Abort when the underlying API fails.
    """
    try:
        typer.secho(f"searching for videos: {query}...", fg=typer.colors.GREEN)
        if kind == "json":
            content = main(query, cache, kind="json")
            rprint(content)
        elif kind == "table":
            # NOTE(review): the `cache` flag is ignored here (False is
            # hard-coded) -- presumably intentional so tables always show
            # fresh results; confirm with the author.
            content = main(query, False, kind="list")
            table = Table(title="Search Results")
            table.add_column("Title", justify="right", style="cyan", no_wrap=False)
            table.add_column("Video URL", style="magenta", no_wrap=True)
            table.add_column("Duration", style="green")
            table.add_column("View count", style="green")
            table.add_column("Published Time", style="green")
            for c in content:
                table.add_row(
                    " ".join(c["title"].split(" ")[:4]),
                    c["url"],
                    c["duration"],
                    str(c["view_count"]),
                    c["publish_time"],
                )
            console = Console()
            console.print(table)
    except YtsException:
        # Bug fix: the original had TWO `except YtsException` handlers; the
        # second ("failed to get response from youtube") was unreachable dead
        # code because the first always matched.
        typer.echo("internal API raised an exception")
        raise typer.Abort()
def validate(online: Optional[bool] = typer.Option(
    False,
    "--online",
    show_default=False,
    help="Test the provided authentication settings on the actual"
    " instance of Confluence.",
)):
    """Validates the provided settings. If 'online' flag is passed - tries to fetch the space from the config using the supplied credentials."""
    echo = state.print_function
    echo_err = state.print_stderr
    if online:
        echo("Validating settings against the Confluence instance from config")
        try:
            # Take the space of the first configured page as the probe target.
            space_key = state.config.pages[0].page_space
            state.print_function(f"Trying to get {space_key}...")
            space_id = state.confluence_instance.get_space(space_key).get("id")
        except ConnectionError:
            echo_err(
                f"Could not connect to {state.config.auth.url}. Make sure it is correct",
            )
            raise typer.Abort(1)
        except ApiError as api_error:
            echo_err(f"Got an API error, details: {api_error.reason}")
            raise typer.Abort(1)
        else:
            # Only reached when the space lookup succeeded.
            echo(f"Got space id #{space_id}.")
    echo("Validation successful")
def delete_key(
    files: List[str] = typer.Option(
        None, "--files", "-f",
        help="Chose either a file or files with absolute path"),
    prompt: bool = typer.Option(True, help="Display a prompt to confirm deletion"),
    threads: int = typer.Option(
        1,
        help=
        "Set the amount of threads to delete keys in parallel. Disable the prompt if using this option",
    ),
):
    """USE WITH EXTREME CAUTION! Deletes a given key or keys"""
    if not files:
        typer.echo("No files provided")
        raise typer.Abort()
    # Guard against absolute-root deletions: first pass rejects leading
    # slashes, second pass rejects trailing slashes (same order as before).
    for key in files:
        if key[0] == "/":
            typer.echo("DO NOT DELETE A KEY STARTING WITH /")
            raise typer.Abort()
    for key in files:
        if key[-1] == "/":
            typer.echo("DO NOT DELETE A KEY ENDING WITH /")
            raise typer.Abort()
    # Fan the deletions out over a worker pool; result() re-raises failures.
    with ThreadPoolExecutor(max_workers=threads) as executor:
        pending = [executor.submit(_deleter, key, prompt) for key in files]
        for task in pending:
            task.result()
def add_students(
    handles: List[str] = Argument(
        metavar="student_handles",
        default=...,
        help="list of student handles separated by white space",
    ),
    yes: bool = Opt.ACCEPT_ALL,
    dry: bool = Opt.DRY,
):
    """Add student to Github Organization"""
    result = DoCheck(settings=settings).withOptions(gh_config_valid=True).run()
    # Idiom fix: test truthiness instead of comparing `== False`.
    if not result.success:
        typer.echo(result.info)
        raise typer.Abort()
    org, team = settings.github.org, settings.add.student_team
    if not (yes or typer.confirm(f"Add students to {org}/{team}?")):
        raise typer.Abort()

    # TODO: add some checks before running the script
    print(f"check student team exists {team}")

    # Imported lazily to keep CLI startup fast (matches the original).
    from hand.scripts import Script
    from hand.scripts.add import ScriptAddStudents

    script: Script = ScriptAddStudents(gh_invite_team=team,
                                       user_handles=handles,
                                       dry_run=dry)
    script.run()
def bootstrap(
    host: str = typer.Argument(
        None, help="the hostname of the configuration to build"),
    nixos: bool = False,
    darwin: bool = False,
    home_manager: bool = False,
):
    """Bootstrap a machine from the flake output selected by the given flags."""
    cfg = select(nixos=nixos, darwin=darwin, home_manager=home_manager)
    flags = "-v --experimental-features 'nix-command flakes'"
    if cfg is None:
        return
    elif cfg == FlakeOutputs.NIXOS:
        # Fixed typo in user-facing message: "boostrap" -> "bootstrap".
        typer.secho(
            "bootstrap does not apply to nixos systems.",
            fg=Colors.ERROR.value,
        )
        raise typer.Abort()
    elif cfg == FlakeOutputs.DARWIN:
        diskSetup()
        # Bug fix: `flags` was embedded inside the flake attribute string AND
        # appended again in the `nix build` command, duplicating the flags and
        # polluting the installable reference.
        flake = f".#{cfg.value}.{host}.config.system.build.toplevel"
        run_cmd(f"nix build {flake} {flags}")
        run_cmd(f"./result/sw/bin/darwin-rebuild switch --flake .#{host}")
    elif cfg == FlakeOutputs.HOME_MANAGER:
        flake = f".#{host}"
        run_cmd(
            f"nix run github:nix-community/home-manager {flags} --no-write-lock-file -- switch --flake {flake} -b backup"
        )
    else:
        typer.secho("could not infer system type.", fg=Colors.ERROR.value)
        raise typer.Abort()
def update_timesheet(
    start_date: str = typer.Option(None),
    dry_run: bool = typer.Option(False),
    round_to_nearest: float = typer.Option(None),
    round_up: bool = typer.Option(False),
):
    """Pull Toggl time entries and append them to a Google Sheets timesheet.

    start_date: ISO date to sync from (None = API default range).
    dry_run: print what would be sent instead of writing to the sheet.
    round_to_nearest / round_up: rounding rules applied when grouping entries.
    Aborts if settings are missing or the configured workspace doesn't exist.
    """
    settings = toml.load(CONFIG_PATH)
    if settings == {}:
        typer.echo("Settings not configured!")
        raise typer.Abort()
    TOGGL = settings["toggl"]
    GOOGLE_SETTINGS = settings["google_api"]
    WORKSPACE = TOGGL["workspace"]
    api = TimesheetAPI(TOGGL["api_token"])
    g_api = GoogleAPI(**GOOGLE_SETTINGS)
    # Shared in-memory SQLite DB used as scratch space for this run.
    db = Database("file::memory:?cache=shared", bootstrap=True)
    parsed_start = parse_iso(start_date).isoformat() if start_date else None
    # Validate the configured workspace against what Toggl actually has.
    workspaces = [i["name"] for i in api.get_workspaces()]
    if WORKSPACE not in workspaces:
        typer.echo(
            f"The workspace {WORKSPACE} does not exist, the options are:{workspaces}"
        )
        raise typer.Abort()
    # Stop any running timer so its duration is final before syncing.
    typer.echo("Stopping running entries...")
    api.stop_running_entry()
    typer.echo("Updating the time entries...")
    db.update_table("time_entries", api.get_time_entries(start_date=parsed_start))
    typer.echo("Updating the projects...")
    db.update_table("project", api.get_projects(get_workspace_id(api, WORKSPACE)))
    db.update_table("project_name", g_api.available_projects)
    typer.echo("Thumbing your Toggl timesheets into google sheet format...")
    # Resume after the last date already present in the sheet.
    insert_time = parse_iso(
        g_api.last_entered_date(default_datetime=parse_iso(parsed_start)))
    grouped = group_by_date(
        db.get_latest_time_entries(insert_time),
        settings["google_api"]["date_format"],
        round_up,
        round_to_nearest,
    )
    published_values = list(grouped.values())
    if dry_run:
        typer.echo("Dry run results:")
        typer.echo(published_values)
    elif len(published_values) > 0:
        typer.echo(f"Sending times for days: {list(grouped.keys())}")
        g_api.append_to_time_sheets(published_values)
        typer.echo("Sent!")
    else:
        typer.echo(
            "No new timesheets available, you should probably do some work.")
def christis_API(
    ldap_server: str = typer.Option(
        ...,
        "--ldap-server-address",
        help="The LDAP server address. Should be in this format LDAP://<Address>"),
    ldap_user_connector: str = typer.Option(
        ...,
        "--ldap-user",
        help=
        "The User DN that can access to server to query like CN=Administrator,CN=Users,DC=cloudarmin,DC=local"
    ),
    ldap_password_connector: str = typer.Option(
        ...,
        "--ldap-password",
        help="The password of user who access to server"),
    ldap_base_dn: str = typer.Option(
        ...,
        "--ldap-base-dn",
        help="The base DN of you LDAP domain like dc=cloudarmin,dc=local"),
    ldap_k8s_group: str = typer.Option(
        ...,
        "--ldap-k8s-group",
        help="The cn of group that its users should access the K8s cluster")):
    """ Generate ChristisCLI configuration file that will be used by ChristisAPI Docker container in the ~/.christisCLI location """
    config = {}
    config["ldap"] = {}
    CLI_Config_Location = get_christis_config_location()
    configFileLocation = "{0}{1}".format(CLI_Config_Location, "ldap.yaml")
    # If a config already exists, ask before overwriting it.
    if Path(configFileLocation).is_file():
        overwrite = typer.confirm(
            "The LDAP configuration exists, Do you want to generate it again?")
        if not overwrite:
            typer.echo("Not Generating New Configuration File")
            raise typer.Abort()
        typer.echo("Generate New Config File")
    config["ldap"]["ldap_server"] = ldap_server
    config["ldap"]["ldap_user_connector"] = ldap_user_connector
    config["ldap"]["ldap_password_connector"] = ldap_password_connector
    config["ldap"]["ldap_base_dn"] = ldap_base_dn
    config["ldap"]["ldap_k8s_group"] = ldap_k8s_group
    if not os.path.exists(CLI_Config_Location):
        os.makedirs(CLI_Config_Location)
    try:
        with open(configFileLocation, 'w') as file:
            # Fix: yaml.dump() returns None when given a stream, so the old
            # `configFile = yaml.dump(...)` bound a useless, unused local.
            yaml.dump(config, file)
    except Exception as e:
        typer.echo(e, err=True)
        raise typer.Abort()
def tools_checkm(fastas_dir: Path = typer.Option(
    ...,
    exists=True,
    file_okay=False,
    dir_okay=True,
    writable=False,
    readable=True,
    resolve_path=True,
    show_default=True
),
        suffix: str = typer.Option("fasta", show_default=True),
        genus: str = typer.Option("lineage_wf", show_default=True),
        output_dir: Path = typer.Option(
            "./checkm_out/",
            exists=False,
            file_okay=False,
            dir_okay=True,
            writable=True,
            readable=True,
            resolve_path=True,
            show_default=True
        ),
        threads: int = typer.Option(16, show_default=True),
        checkm_table: Path = typer.Option(
            "./checkm.txt",
            exists=False,
            file_okay=True,
            dir_okay=False,
            writable=True,
            readable=True,
            resolve_path=True,
            show_default=True
        ),
        force: bool = typer.Option(False, "--force", "-f")
):
    """Run CheckM over a directory of FASTA files via the bundled checkm.sh."""
    typer.echo("Checking output directory.")
    # With --force, stale outputs are renamed aside with a timestamp suffix;
    # otherwise their presence aborts the run.
    if output_dir.exists() and output_dir.is_dir():
        if not force:
            typer.secho("Output directory already exist, please move/delete and try again.", fg=typer.colors.RED)
            raise typer.Abort()
        output_dir.rename(f"{output_dir}{int(time.time())}")
        typer.secho("Rename checkm output dir.", fg=typer.colors.RED)
    if checkm_table.exists() and checkm_table.is_file():
        if not force:
            typer.secho("Output checkm results already exist.", fg=typer.colors.RED)
            raise typer.Abort()
        checkm_table.rename(f"{checkm_table}{int(time.time())}")
        typer.secho("Rename checkm result file.", fg=typer.colors.RED)
    # checkm.sh lives next to this module.
    script = str(Path(__file__).resolve().parent.joinpath('checkm.sh'))
    argv = [script, str(fastas_dir), str(suffix), str(genus),
            str(output_dir), str(threads), str(checkm_table)]
    # NOTE(review): command runs through the shell; arguments come from CLI
    # options, so quoting issues are possible with unusual paths.
    subprocess.check_call(" ".join(argv), shell=True)
    typer.secho("Finished", fg=typer.colors.GREEN)
def makemigrations():
    """Generate new migration files, aborting on known migration errors."""
    check_dir()
    try:
        mgr.make_migrations()
    except NoMigrationsError:
        # Nothing new to migrate -- warn and exit non-zero.
        typer.secho('WARNING: 0 new migrations were found', fg='yellow')
        raise typer.Abort()
    except UnidentifiedSQLError as err:
        message = f'''Error: File {err.args[1]} contains sql that does not belong to upgrade or downgrade:\n{err.args[0]}'''
        typer.secho(message, fg='red', bg='white')
        raise typer.Abort()
def cli(
    api_address: str = typer.Option(...,
                                    "--api-address",
                                    envvar="CHRISTIS_API_ADDRESS",
                                    help="Christis API Server Address"),
    api_port: str = typer.Option(...,
                                 "--api-port",
                                 envvar="CHRISTIS_API_PORT",
                                 help="Christis API Server Port"),
    k8s_group: str = typer.Option(
        ...,
        "--k8s-group",
        envvar="CHRISTIS_K8s_LDAP_GROUP",
        help="The group that its user should access to K8s cluster"),
):
    """ Generate a ChristisCLI configuration file in the ~/.christisCLI location """
    # The CLI config depends on the Mongo config being generated first.
    if not Path(get_mongo_configuration_location()).is_file():
        typer.echo(
            f"The Database config file is not found. Please generate it via CLI and try again.",
            err=True)
        raise typer.Abort()
    config = {}
    config['ChristisCLI'] = {}
    CLI_Config_Location = get_christis_config_location()
    configFileLocation = "{0}{1}".format(CLI_Config_Location, "cli.yaml")
    # If a config already exists, ask before overwriting it.
    if Path(configFileLocation).is_file():
        overwrite = typer.confirm(
            "The CLI configuration exists, Do you want to generate it again?")
        if not overwrite:
            typer.echo("Not Generating New Configuration File")
            raise typer.Abort()
        typer.echo("Generate New Config File")
    config['ChristisCLI']["CHRISTIS_API_ADDRESS"] = api_address
    config['ChristisCLI']["CHRISTIS_API_PORT"] = api_port
    config['ChristisCLI']["CHRISTIS_K8s_LDAP_GROUP"] = k8s_group
    config['ChristisCLI'][
        "CHRISTIS_MONGO_CONFIG_FILE"] = get_mongo_configuration_location()
    if not os.path.exists(CLI_Config_Location):
        os.makedirs(CLI_Config_Location)
    try:
        with open(configFileLocation, 'w') as file:
            # Fix: yaml.dump() returns None when given a stream, so the old
            # `configFile = yaml.dump(...)` bound a useless, unused local.
            yaml.dump(config, file)
    except Exception as e:
        typer.echo(e, err=True)
        raise typer.Abort()
def __init__(self, filename):
    """gets filepath for code file and config file."""
    directory, argorithm_file = os.path.split(filename)
    # Strip the ".py" extension before validating the argorithm name.
    argorithmID = name_check(argorithm_file[:-3])
    if not directory:
        directory = os.getcwd()
    self.codepath = os.path.join(directory, argorithm_file)
    self.configpath = os.path.join(directory, argorithmID + ".config.json")
    # Both the code file and its config must already exist.
    if not os.path.isfile(self.codepath):
        msg.warn("Python file not found", 'use the init command first')
        raise typer.Abort()
    if not os.path.isfile(self.configpath):
        msg.warn("config file not found", 'use the configure command first')
        raise typer.Abort()
def read(path: Path = typer.Argument(None)):
    """Read from `path`, rejecting missing paths and directories.

    Raises typer.Abort when no path is given or the path does not exist.
    """
    if path is None:
        typer.secho(f"No path provided.", fg=typer.colors.BRIGHT_RED)
        raise typer.Abort()
    elif path.is_dir():
        typer.secho(f"Cannot read from a directory.",
                    fg=typer.colors.BRIGHT_YELLOW)
    elif path.is_file():
        typer.secho(f"Reading from path: {path}", fg=typer.colors.MAGENTA)
        helpers.read_or_write(file=path)
    elif not path.exists():
        typer.secho(f"Given path: `{path}` does not exist.",
                    fg=typer.colors.BRIGHT_RED)
        # Bug fix: `typer.Abort()` was instantiated but never raised, so the
        # command exited with status 0 even when the path did not exist.
        raise typer.Abort()
def bounce(
    what: BounceArgs = typer.Argument(...),
    device: str = typer.Argument(..., metavar="Device: [serial #|name|ip address|mac address]"),
    port: str = typer.Argument(..., ),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging",
                               callback=cli.debug_callback),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account",
                                 callback=cli.default_callback),
    account: str = typer.Option("central_info",
                                envvar="ARUBACLI_ACCOUNT",
                                help="The Aruba Central Account to use (must be defined in the config)",
                                callback=cli.account_name_callback),
) -> None:
    """Bounce PoE or the interface on a device port (with confirmation)."""
    # Either -Y or -y counts as confirmation.
    if yes_:
        yes = yes_
    dev = cli.cache.get_dev_identifier(device)
    command = 'bounce_poe_port' if what == 'poe' else 'bounce_interface'
    confirm_msg = typer.style(f"Please Confirm bounce {what} on {dev.name} port {port}", fg="cyan")
    if not (yes or typer.confirm(confirm_msg)):
        raise typer.Abort()
    resp = cli.central.request(cli.central.send_bounce_command_to_device, dev.serial, command, port)
    typer.secho(str(resp), fg="green" if resp else "red")
    # !! removing this for now Central ALWAYS returns:
    # !! reason: Sending command to device. state: QUEUED, even after command execution.
    # if resp and resp.get('task_id'):
    #     resp = cli.central.request(session.get_task_status, resp.task_id)
    #     typer.secho(str(resp), fg="green" if resp else "red")
def new_lang(lang: str = typer.Argument(..., callback=lang_callback)):
    """
    Generate a new docs translation directory for the language LANG.

    LANG should be a 2-letter language code, like: en, es, de, pt, etc.
    """
    new_path: Path = docs_root_path / lang
    if new_path.exists():
        typer.echo(f"The language was already created: {lang}")
        raise typer.Abort()
    new_path.mkdir()
    new_config = get_base_lang_config(lang)
    new_config_path: Path = Path(new_path) / mkdocs_name
    new_config_path.write_text(
        yaml.dump(new_config, sort_keys=False, width=200, allow_unicode=True),
        encoding="utf-8",
    )
    new_config_docs_path: Path = new_path / "docs"
    new_config_docs_path.mkdir()
    # Copy theme overrides from the default language.
    new_overrides_path: Path = new_path / "overrides"
    shutil.copytree(docs_root_path / default_lang / "overrides", new_overrides_path)
    # Seed index.md from the default language, marked as untranslated.
    default_index_path: Path = default_docs_path / "docs" / "index.md"
    new_index_path: Path = new_config_docs_path / "index.md"
    default_text = default_index_path.read_text(encoding="utf-8")
    lang_text = get_text_with_translate_missing(default_text)
    new_index_path.write_text(lang_text, encoding="utf-8")
    # Bug fix: secho has no `color=<name>` parameter -- click's `color` kwarg
    # is a bool that toggles ANSI stripping. `fg` sets the foreground color.
    typer.secho(f"Successfully initialized: {new_path}", fg=typer.colors.GREEN)
    update_languages(lang=None)
def delete(
    path: str = typer.Argument(None, help="The path of the deployment"),
    force: bool = typer.Option(
        False, help="Enable/disable removal without confirmation prompt"),
) -> None:
    """Delete a Quetz deployment.

    Raises typer.Abort when no deployment exists at `path` or the user
    declines the confirmation prompt.
    """
    # Annotation fixed: this function returns normally on success, so
    # `NoReturn` was incorrect; the proper annotation is `None`.
    deployment_dir = Path(path)
    if not _is_deployment(deployment_dir):
        typer.echo(f'No Quetz deployment found at {path}.', err=True)
        raise typer.Abort()
    if not force and not typer.confirm(f"Delete Quetz deployment at {path}?"):
        raise typer.Abort()
    shutil.rmtree(deployment_dir)
def main(json_schema_file: str):
    """Flatten a JSON schema file into a markdown table and copy it to the clipboard.

    Nested `fields` entries are expanded into extra rows tagged with their
    parent `name`; aborts if the file does not exist.
    """
    if not os.path.isfile(json_schema_file):
        typer.echo(f"{json_schema_file} does not seem to exist.")
        raise typer.Abort()
    df = pd.read_json(json_schema_file)
    # Idiom fix: direct boolean instead of `True if ... else False`.
    table = "fields" in df.columns
    if table:
        df["table"] = "bibl"
        # `fields==fields` filters out NaN rows (NaN != NaN), i.e. keeps only
        # rows that actually carry nested fields.
        for name, field in df[["name", "fields"]].query("fields==fields").values:
            tmp = pd.DataFrame.from_dict(field)
            tmp["table"] = name
            # Fix: DataFrame.append was deprecated and removed in pandas 2.0;
            # pd.concat is the supported equivalent.
            df = pd.concat([df, tmp], sort=False)
        df = df[df["fields"].isna()]
    if not table:
        ORDERED_VAR.remove("table")
        TEXTTT_VAR.remove("table")
    df = df[ORDERED_VAR]
    for var in TEXTTT_VAR:
        df[var] = df[var].apply(to_texttt)
    pyperclip.copy(df.set_index(ORDERED_VAR[0]).to_markdown())
    typer.secho(message="Table (.md) copied to clip-board", fg=typer.colors.BLUE)
def build(
    host: str = typer.Argument(
        None, help="the hostname of the configuration to build"),
    pull: bool = typer.Option(
        default=False,
        help="whether to fetch current changes from the remote"),
    nixos: bool = False,
    darwin: bool = False,
    home_manager: bool = False,
):
    """Build (without activating) the flake configuration for `host`."""
    cfg = select(nixos=nixos, darwin=darwin, home_manager=home_manager)
    if cfg is None:
        return
    elif cfg == FlakeOutputs.NIXOS:
        cmd = "sudo nixos-rebuild build --flake"
    elif cfg == FlakeOutputs.DARWIN:
        cmd = "darwin-rebuild build --flake"
    elif cfg == FlakeOutputs.HOME_MANAGER:
        cmd = "home-manager build --flake"
    else:
        typer.secho("could not infer system type.", fg=Colors.ERROR.value)
        raise typer.Abort()
    if pull:
        git_pull()
    # Redundancy fix: `flake` was assigned the identical value in every branch
    # above AND re-assigned again afterwards; compute it once here.
    flake = f".#{host}"
    flags = "--show-trace"
    run_cmd(f"{cmd} {flake} {flags}")
def setup():
    """Prepare paired repos for practicing git push and pull."""
    # Copy repo so we can practice pulling.
    repo_name = os.getcwd()
    pull_repo_name = repo_name + PULL_SUFFIX
    if os.path.exists(pull_repo_name):
        if not typer.confirm(
                f"The directory {pull_repo_name} already exists. Do you want to delete it?"):
            cli.info("Not deleting.")
            raise typer.Abort()
        cli.info(f"Deleting {pull_repo_name}.")
        utils.rmtree_readonly(pull_repo_name)
    shutil.copytree(repo_name, pull_repo_name)

    # Setup original repo so we can practice pushing.
    with open(".gsc_id", "w") as f:
        f.write("push_and_pull")
    add_result = subprocess.run(["git", "add", ".gsc_id"], stdout=PIPE, stderr=PIPE)
    if add_result.returncode != 0:
        raise setup_exercise.SetupError("Failed to add gsc_id. Contact us for help.")
    commit_result = subprocess.run(["git", "commit", "-m", COMMIT_MSG],
                                   stdout=PIPE, stderr=PIPE)
    if commit_result.returncode != 0:
        raise setup_exercise.SetupError(
            "Failed to setup Git Scientist. Contact us for help."
        )
def fetch_dir_name(self, *, dir_id: str) -> str:
    """
    Returns info obtained regarding a directory/file from Google Drive API

    Remarks
    --------
    Designed to get directory name from drive API, but can work with files
    as well. Internally caches directory name against folder id in `self.dirs`

    Always fetches info through a network call. Use `drive_name` to look
    through cache before making a network call
    """
    try:
        result = (self.resource.files().get(
            fileId=dir_id, supportsAllDrives=True).execute())
        if result.get("id", True) == result.get("teamDriveId", None):
            # Enters this block only if the `dir_id` belongs to a teamdrive
            result = self.resource.drives().get(driveId=dir_id).execute()
        # Cache directory name -- works with teamdrives and folders, id's are unique
        self.dirs[result["id"]] = result["name"]
        return result["name"]
    except Exception as e:
        # NOTE(review): broad catch kept (the drive client raises many error
        # types), but the cause is now chained instead of silently discarded
        # so the underlying API error stays visible in the traceback.
        typer.secho(f"Unable to find drive directory `{dir_id}`",
                    fg=typer.colors.RED)
        raise typer.Abort() from e
def new_lang(lang: str = typer.Argument(..., callback=lang_callback)):
    """
    Generate a new docs translation directory for the language LANG.

    LANG should be a 2-letter language code, like: en, es, de, pt, etc.
    """
    new_path: Path = Path("docs") / lang
    if new_path.exists():
        typer.echo(f"The language was already created: {lang}")
        raise typer.Abort()
    new_path.mkdir()
    en_docs_path = Path("docs/en")
    en_config_path: Path = en_docs_path / mkdocs_name
    en_config: dict = mkdocs.utils.yaml_load(en_config_path.read_text())
    fastapi_url_base = "https://fastapi.tiangolo.com/"
    # Build the new language's mkdocs config from the English one, rewriting
    # relative asset URLs to absolute ones on the canonical site.
    new_config = {}
    new_config["site_name"] = en_config["site_name"]
    new_config["site_description"] = en_config["site_description"]
    new_config["site_url"] = en_config["site_url"] + f"{lang}/"
    new_config["theme"] = en_config["theme"]
    new_config["theme"]["logo"] = fastapi_url_base + en_config["theme"]["logo"]
    new_config["theme"]["favicon"] = fastapi_url_base + en_config["theme"]["favicon"]
    new_config["theme"]["language"] = lang
    new_config["repo_name"] = en_config["repo_name"]
    new_config["repo_url"] = en_config["repo_url"]
    new_config["edit_uri"] = en_config["edit_uri"]
    new_config["google_analytics"] = en_config["google_analytics"]
    # Only the first two nav entries are carried over for a fresh translation.
    new_config["nav"] = en_config["nav"][:2]
    new_config["markdown_extensions"] = en_config["markdown_extensions"]
    new_config["extra"] = en_config["extra"]
    extra_css = []
    css: str
    for css in en_config["extra_css"]:
        if css.startswith("http"):
            extra_css.append(css)
        else:
            extra_css.append(fastapi_url_base + css)
    new_config["extra_css"] = extra_css
    extra_js = []
    js: str
    for js in en_config["extra_javascript"]:
        if js.startswith("http"):
            extra_js.append(js)
        else:
            extra_js.append(fastapi_url_base + js)
    new_config["extra_javascript"] = extra_js
    new_config_path: Path = Path(new_path) / mkdocs_name
    new_config_path.write_text(yaml.dump(new_config, sort_keys=False, width=200))
    new_config_docs_path: Path = new_path / "docs"
    new_config_docs_path.mkdir()
    # Seed index.md from English, prefixed with the missing-translation note.
    en_index_path: Path = en_docs_path / "docs" / "index.md"
    new_index_path: Path = new_config_docs_path / "index.md"
    en_index_content = en_index_path.read_text()
    new_index_content = f"{missing_translation_snippet}\n\n{en_index_content}"
    new_index_path.write_text(new_index_content)
    # Bug fix: secho has no `color=<name>` parameter -- click's `color` kwarg
    # is a bool that toggles ANSI stripping. `fg` sets the foreground color.
    typer.secho(f"Successfully initialized: {new_path}", fg=typer.colors.GREEN)
    update_languages(lang=None)
def new_nb(file_path: Path = typer.Argument(
    ...,
    exists=False,
    file_okay=True,
    dir_okay=False,
    writable=False,
    readable=True,
    resolve_path=True,
)):
    """
    Create ipynb in colab

    Note: Useful cmd in new projects
    """
    if file_path is None:
        typer.echo("No file")
        raise typer.Abort()
    if file_path.is_file():
        # An existing notebook should be opened, not re-created.
        typer.echo("file already exist try : open-nb ")
    elif file_path.is_dir():
        typer.echo("is a directory")
    elif not file_path.exists():
        # Write a fresh notebook skeleton locally, then push it to colab.
        notebook_name = os.path.basename(file_path)
        notebook_skeleton = get_colab_metadata(notebook_name)
        with open(notebook_name, 'w') as fp:
            fp.write(json.dumps(notebook_skeleton))
        folder_struct_list, upload_file_name, file_path = process_file_path(
            file_path)
        cli_new(folder_struct_list, upload_file_name, file_path)
def new_lang(lang: str = typer.Argument(..., callback=lang_callback)):
    """
    Generate a new docs translation directory for the language LANG.

    LANG should be a 2-letter language code, like: en, es, de, pt, etc.
    """
    new_path: Path = Path("docs") / lang
    if new_path.exists():
        typer.echo(f"The language was already created: {lang}")
        raise typer.Abort()
    new_path.mkdir()
    new_config = get_base_lang_config(lang)
    new_config_path: Path = Path(new_path) / mkdocs_name
    new_config_path.write_text(
        yaml.dump(new_config, sort_keys=False, width=200, allow_unicode=True),
        encoding="utf-8",
    )
    new_config_docs_path: Path = new_path / "docs"
    new_config_docs_path.mkdir()
    # Seed index.md from English, prefixed with the missing-translation note.
    # (`en_docs_path` is presumably a module-level constant -- not visible here.)
    en_index_path: Path = en_docs_path / "docs" / "index.md"
    new_index_path: Path = new_config_docs_path / "index.md"
    en_index_content = en_index_path.read_text(encoding="utf-8")
    new_index_content = f"{missing_translation_snippet}\n\n{en_index_content}"
    new_index_path.write_text(new_index_content, encoding="utf-8")
    # Bug fix: secho has no `color=<name>` parameter -- click's `color` kwarg
    # is a bool that toggles ANSI stripping. `fg` sets the foreground color.
    typer.secho(f"Successfully initialized: {new_path}", fg=typer.colors.GREEN)
    update_languages(lang=None)
def create_news_fragment(self):
    """Create a news fragment for a PR."""
    choices = [option.value for option in FragmentOptions]
    pr_number = typer.prompt("Please enter the PR number", type=int)
    fragment_type = typer.prompt(f"Choose a fragment type {choices}",
                                 type=FragmentOptions)
    # Fragment files are named <PR>.<type>.md inside the fragments directory.
    filepath = Path(self.news_fragment_path).joinpath(
        f"{pr_number}.{fragment_type}.md")
    if filepath.exists():
        logger.warning(f"FAILED: File {filepath} already exists.")
        raise typer.Abort()
    content = typer.prompt(
        "Describe the changes to the END USERS.\n"
        "Example: New command line interface (CLI) utility for creating news fragments created.\n",
        type=str,
    )
    if not Path(self.news_fragment_path).exists():
        Path.mkdir(self.news_fragment_path, parents=True)
    with open(filepath, "w", encoding="utf-8") as fp:
        fp.write(f"{content} (#{pr_number})")
    logger.info(f"Please commit the file created at: {filepath}")
def init(
    POSTGRES_USER: str = typer.Option("postgres", prompt=True),
    POSTGRES_PASSWORD: str = typer.Option(
        "81f4e99fd99fb97039cf755532ce2b98f308dc417c0ed6d34ba2b9f739a2e30d", prompt=True
    ),
    POSTGRES_SERVER: str = typer.Option("db", prompt=True),
    POSTGRES_DB: str = typer.Option("sample_db", prompt=True),
    PGADMIN_LISTEN_PORT: int = typer.Option(5050, prompt=True),
    PGADMIN_DEFAULT_EMAIL: str = typer.Option("*****@*****.**", prompt=True),
    PGADMIN_DEFAULT_PASSWORD: str = typer.Option(
        "81f4e99fd99fb97039cf755532ce2b98f308dc417c0ed6d34ba2b9f739a2e30d", prompt=True
    ),
) -> None:
    """アプリケーション起動に必要な設定ファイル(.env)を生成します。"""
    # Docstring (Japanese) reads: "Generates the settings file (.env) required
    # to start the application." It is kept verbatim because typer surfaces it
    # as the command's help text, which is runtime-visible behavior.
    path = ".env"
    # Refuse to overwrite an existing .env file.
    if os.path.exists(path):
        typer.echo(".envファイルがすでに存在します。")  # ".env file already exists."
        raise typer.Abort()
    # Collect all prompted values into the Env model...
    obj = Env(
        POSTGRES_USER=POSTGRES_USER,
        POSTGRES_PASSWORD=POSTGRES_PASSWORD,
        POSTGRES_SERVER=POSTGRES_SERVER,
        POSTGRES_DB=POSTGRES_DB,
        PGADMIN_LISTEN_PORT=PGADMIN_LISTEN_PORT,
        PGADMIN_DEFAULT_EMAIL=PGADMIN_DEFAULT_EMAIL,
        PGADMIN_DEFAULT_PASSWORD=PGADMIN_DEFAULT_PASSWORD,
    )
    # ...and serialize them in dotenv format.
    with open(path, mode="w") as f:
        f.write(obj.to_env_file_str())
    typer.echo(f"Generated {path}")
def run(check_interval: int = 300,
        digital_ocean_auth_token: Optional[str] = None,
        domain: Optional[List[str]] = typer.Option(None)):
    """
    Run process to monitor and update DNS.
    Update only if current_ip isn't known yet, or different from gathered IP

    check_interval: seconds to sleep between public-IP polls.
    digital_ocean_auth_token: API token used when updating DNS records.
    (Annotation fixed: the default is None, so the type is Optional[str].)
    """
    known_ip = None
    if not domain:
        typer.echo("No domains provided")
        raise typer.Abort()
    while True:
        ip = _get_ip()
        if known_ip is None or ip != known_ip:
            logging.info(f'system public IP: {known_ip}')
            logging.warning(f'detected new system public IP: {ip}')
            # Push the new IP to every configured (sub)domain.
            for d in domain:
                tld, subdomain = _extract_domain_and_subdomain(d)
                _set_dns(tld, subdomain, ip, digital_ocean_auth_token)
            known_ip = ip
        else:
            # Redundant `known_ip = ip` removed: ip == known_ip in this branch.
            logging.info(f'system public IP : {known_ip}')
        time.sleep(check_interval)