async def get_access_token(http_client: httpx.AsyncClient, app_name: str) -> dict:
    """Return an OAuth access token for CoCoHub.

    Loads the service account stored under the typer app dir (offering to
    generate one interactively when missing), then exchanges its client
    credentials for an access token.  On HTTP 401 the cached service account
    is assumed expired: the user may regenerate it, and the exchange is
    retried once.

    Raises:
        Exception: when the user declines to generate a service account.
        httpx.exceptions.HTTPError: for non-401 HTTP failures, and for a
            failure of the single post-regeneration retry.  (Previously a
            non-401 error was swallowed and the function returned None.)
    """
    service_account_path = (
        pathlib.Path(typer.get_app_dir(app_name)) / COCOHUB_SERVICE_ACCOUNT
    )

    async def _request_token(account: CoCoHubServiceAccount) -> dict:
        # Single place for the token exchange; this was previously duplicated
        # verbatim for the first attempt and the post-401 retry.
        response = await http_client.post(
            COCOHUB_TOKEN_URL,
            auth=(account.client_id, account.client_secret),
            # Explicit zero length: some servers reject a body-less POST
            # without it.
            headers={"content-length": "0"},
        )
        response.raise_for_status()
        return response.json()

    if service_account_path.exists():
        with service_account_path.open("r") as f:
            service_account: CoCoHubServiceAccount = CoCoHubServiceAccount.validate(
                json.load(f)
            )
    else:
        if typer.confirm("No service account. do you want to generate one now?"):
            service_account = generate_service_account(service_account_path)
        else:
            raise Exception(
                "No service account. try again after authenticating at CoCoHub"
            )

    try:
        return await _request_token(service_account)
    except httpx.exceptions.HTTPError as err:
        # Only a 401 is recoverable here; anything else propagates to the
        # caller instead of being silently dropped.
        if err.response.status_code != 401:
            raise
        if typer.confirm(
            "\n Service account expired. do you want to generate a new one now?"
        ):
            # Discard the stale account and retry exactly once.
            os.remove(service_account_path)
            service_account = generate_service_account(service_account_path)
            return await _request_token(service_account)
        raise Exception(
            "No service account. try again after authenticating at CoCoHub"
        )
def cli(
    log_level: str = typer.Option("WARNING", help="Logging Level"),
    log_data: bool = typer.Option(False, help="Choose output format type"),
    log_scan: bool = typer.Option(False, help="Log data that was scanned"),
    config_path: Path = typer.Option(typer.get_app_dir("tokern"),
                                     help="Path to config directory"),
    output_format: OutputFormat = typer.Option(OutputFormat.tabular,
                                               case_sensitive=False),
    catalog_path: Path = typer.Option(
        None, help="Path to store catalog state. Use if NOT using a database"),
    catalog_host: str = typer.Option(
        None,
        help="hostname of Postgres database. Use if catalog is a database."),
    catalog_port: int = typer.Option(
        None, help="port of Postgres database. Use if catalog is a database."),
    catalog_user: str = typer.Option(
        None, help="user of Postgres database. Use if catalog is a database."),
    catalog_password: str = typer.Option(
        None,
        help="password of Postgres database. Use if catalog is a database."),
    catalog_database: str = typer.Option(
        None,
        help="database of Postgres database. Use if catalog is a database."),
    catalog_secret: str = typer.Option(
        dbcat.settings.DEFAULT_CATALOG_SECRET,
        help="Secret to encrypt sensitive data like passwords in the catalog.",
    ),
    version: Optional[bool] = typer.Option(None,
                                           "--version",
                                           callback=version_callback,
                                           is_eager=True),
):
    """Root typer callback: configure logging, ensure the app directory
    exists, and publish every catalog connection option into the shared
    ``dbcat.settings`` module for the subcommands to consume.
    """
    logging.config.dictConfig(log_config(log_level=log_level.upper()))
    if log_scan:
        # Mirror scan events to a stream handler as JSON lines.
        handler = logging.StreamHandler()
        handler.setFormatter(jsonlogger.JsonFormatter())
        handler.setLevel(logging.INFO)
        scan_logger.addHandler(handler)
        LOGGER.debug("SCAN LOG setup")
    if log_data:
        # Same JSON-lines treatment for the data logger.
        handler = logging.StreamHandler()
        handler.setFormatter(jsonlogger.JsonFormatter())
        handler.setLevel(logging.INFO)
        data_logger.addHandler(handler)
        LOGGER.debug("DATA LOG setup")
    app_dir_path = Path(config_path)
    app_dir_path.mkdir(parents=True, exist_ok=True)
    # Hand every catalog option to the global dbcat settings module.
    dbcat.settings.CATALOG_PATH = catalog_path
    dbcat.settings.CATALOG_USER = catalog_user
    dbcat.settings.CATALOG_PASSWORD = catalog_password
    dbcat.settings.CATALOG_HOST = catalog_host
    dbcat.settings.CATALOG_PORT = catalog_port
    dbcat.settings.CATALOG_DB = catalog_database
    dbcat.settings.CATALOG_SECRET = catalog_secret
    dbcat.settings.APP_DIR = app_dir_path
    dbcat.settings.OUTPUT_FORMAT = output_format
def _get_default_config_paths() -> typing.Tuple:
    """Return the candidate config-file locations as a tuple.

    System-wide path first, then the per-user typer app dir, then (when set)
    the path named by ``DOMINODE_BOOTSTRAPPER_CONFIG_PATH``.
    """
    candidates = [
        Path('/etc/dominode/dominode-bootstrapper.conf'),
        Path(typer.get_app_dir('dominode-bootstrapper')) / 'config.conf',
    ]
    env_override = os.getenv('DOMINODE_BOOTSTRAPPER_CONFIG_PATH')
    if env_override:
        candidates.append(Path(env_override))
    return tuple(candidates)
def main():
    """Ensure the app config dir and a default config file exist, then open it."""
    config_dir = Path(typer.get_app_dir(APP_NAME))
    config_dir.mkdir(parents=True, exist_ok=True)
    config_file: Path = config_dir / "config.json"
    if not config_file.is_file():
        # Seed a minimal config on first run.
        config_file.write_text('{"version": "1.0.0"}')
    print("Opening config directory")
    typer.launch(str(config_file), locate=True)
def _load_catalog(self) -> Generator[TableMetadata, None, None]:
    """Yield one ``TableMetadata`` per table found in the Tokern catalog.

    Opens the catalog with the connection options from ``self.catalog_config``,
    narrows to ``self.source_names`` when given, and applies the configured
    schema/table include/exclude regexes.  Columns with a PII type get "pii"
    plus the specific PII-type name as badges.
    """
    LOGGER.debug(self.catalog_config.as_plain_ordered_dict())
    catalog = open_catalog(
        app_dir=Path(typer.get_app_dir("tokern")),
        **self.catalog_config.as_plain_ordered_dict()
    )
    with closing(catalog) as catalog:
        with catalog.managed_session:
            # Restrict to explicitly requested sources when any were named.
            if self.source_names is not None and len(self.source_names) > 0:
                sources = [
                    catalog.get_source(source_name)
                    for source_name in self.source_names
                ]
            else:
                sources = catalog.get_sources()
            for source in sources:
                for schema, table in table_generator(
                    catalog=catalog,
                    source=source,
                    include_schema_regex_str=self.include_schema_regex,
                    exclude_schema_regex_str=self.exclude_schema_regex,
                    include_table_regex_str=self.include_table_regex,
                    exclude_table_regex_str=self.exclude_table_regex,
                ):
                    columns = []
                    for column in catalog.get_columns_for_table(table):
                        badges = []
                        if column.pii_type is not None:
                            # Generic marker plus the concrete PII type name.
                            badges.append("pii")
                            badges.append(column.pii_type.name)
                        columns.append(
                            ColumnMetadata(
                                name=column.name,
                                description="",
                                col_type=column.data_type,
                                sort_order=column.sort_order,
                                badges=badges,
                            )
                        )
                    yield TableMetadata(
                        database=source.database,
                        cluster=source.name,
                        schema=schema.name,
                        name=table.name,
                        columns=columns,
                        description="",
                    )
def config_dir(self) -> Path:
    """A pathlib.Path for the application-specific configuration directory.

    Configuration files and cached catalog CSV files are kept here.  The
    directory is created (mode 0o755) on first access and the result is
    cached on the instance.
    """
    cached = getattr(self, "_config_dir", None)
    if cached is not None:
        return cached
    directory = Path(typer.get_app_dir(__package__))
    directory.mkdir(mode=0o755, exist_ok=True)
    self._config_dir = directory
    return directory
def edit():
    """Edit catalogues

    Work in progress
    """
    app_path = Path(typer.get_app_dir(APP_NAME))
    app_path.mkdir(exist_ok=True)
    config_path: Path = app_path / "config.yaml"
    if not config_path.is_file():
        # Seed a minimal document (JSON is a YAML subset, so this parses).
        config_path.write_text('{"version": "1.0.0"}')
    print(config_path)
    typer.launch(str(config_path), locate=True)
def ensure_model_dir(model_dir_path: Union[Path, None] = None) -> Path:
    """Checks for a local model dir and creates one if not found"""
    if model_dir_path:
        models_dir = Path(model_dir_path) / "models"
    else:
        # No explicit base given: fall back to the per-user typer app dir.
        models_dir = Path(typer.get_app_dir(APP_NAME)) / "models"
    if not models_dir.is_dir():
        typer.echo(f"Creating directory for storing models in {models_dir}...")
        try:
            models_dir.mkdir(parents=True)
        except PermissionError as e:  # pragma: no cover
            typer.echo(f"{models_dir} is not writeable: {e}")
            raise typer.Exit(code=1)
    typer.echo(f"Models stored in {models_dir}")
    return models_dir
def create(alias: str = typer.Argument(...)):
    """Create a compressed FTP backup for the config block named *alias*.

    Looks the alias up in the parsed config, mirrors the remote directory
    into ``<app_dir>/backups/<alias>/<date>/<time>``, zips the result and
    removes the uncompressed tree.

    NOTE: this changes the process working directory (``os.chdir``), which
    ``backup_directory`` appears to rely on.
    """
    config = ConfigController.parse_config()
    app_dir = typer.get_app_dir(APP_NAME)
    if not len(config.blocks):
        typer.echo("Nothing to backup...")
        return
    filtered_blocks = list(
        filter(lambda x: x.alias == alias, config.blocks))
    if not len(filtered_blocks):
        typer.secho(f"Записи с алиасом {alias} не существует.",
                    fg=typer.colors.RED)
        return
    backup_block = filtered_blocks[0]
    os.chdir(app_dir)
    # exist_ok replaces the previous bare `except: pass` blocks, which
    # would also have hidden real errors such as PermissionError.
    os.makedirs('backups', exist_ok=True)
    os.chdir('backups')
    now = datetime.datetime.now()
    backup_path = (f"{backup_block.alias}/{now.strftime('%Y-%m-%d')}"
                   f"/{now.strftime('%H_%M')}")
    os.makedirs(backup_path, exist_ok=True)
    os.chdir(backup_path)
    local_dir = os.getcwd()
    ftp_obj = ftplib.FTP(host=backup_block.hostname,
                         user=backup_block.login,
                         passwd=backup_block.password)
    ftp_obj.set_pasv(False)
    backup_directory(ftp_obj=ftp_obj,
                     local_dir=local_dir,
                     remote_dir=backup_block.dir_to_backup)
    ftp_obj.quit()
    typer.secho("Backup created successfully. Compressing...",
                fg=typer.colors.BLUE)
    # Context manager guarantees the archive is flushed/closed even if
    # zipdir raises part-way through.
    with zipfile.ZipFile(f"{local_dir}.zip", 'w', zipfile.ZIP_DEFLATED) as zipf:
        zipdir(local_dir, zipf)
    shutil.rmtree(local_dir, ignore_errors=False,
                  onerror=errorRemoveReadonly)
    typer.secho("Backup compressed. Done", fg=typer.colors.GREEN)
def cli(
    log_level: str = typer.Option("WARNING", help="Logging Level"),
    output_format: OutputFormat = typer.Option(OutputFormat.tabular,
                                               case_sensitive=False),
    catalog_path: Path = typer.Option(
        None, help="Path to store catalog state. Use if NOT using a database"),
    catalog_host: str = typer.Option(
        None,
        help="hostname of Postgres database. Use if catalog is a database."),
    catalog_port: int = typer.Option(
        None, help="port of Postgres database. Use if catalog is a database."),
    catalog_user: str = typer.Option(
        None, help="user of Postgres database. Use if catalog is a database."),
    catalog_password: str = typer.Option(
        None,
        help="password of Postgres database. Use if catalog is a database."),
    catalog_database: str = typer.Option(
        None,
        help="database of Postgres database. Use if catalog is a database."),
    catalog_secret: str = typer.Option(
        "TOKERN_CATALOG_SECRET",
        help="Secret to encrypt sensitive data like passwords in the catalog.",
    ),
    version: Optional[bool] = typer.Option(None,
                                           "--version",
                                           callback=version_callback),
):
    """Root typer callback: configure logging, ensure the "tokern" app
    directory exists, and push the catalog connection options into the
    shared ``dbcat.settings`` module.
    """
    logging.config.dictConfig(log_config(log_level=log_level.upper()))
    app_dir = typer.get_app_dir("tokern")
    app_dir_path = Path(app_dir)
    app_dir_path.mkdir(parents=True, exist_ok=True)
    # Publish every option globally for the subcommands.
    dbcat.settings.CATALOG_PATH = catalog_path
    dbcat.settings.CATALOG_USER = catalog_user
    dbcat.settings.CATALOG_PASSWORD = catalog_password
    dbcat.settings.CATALOG_HOST = catalog_host
    dbcat.settings.CATALOG_PORT = catalog_port
    dbcat.settings.CATALOG_DB = catalog_database
    dbcat.settings.CATALOG_SECRET = catalog_secret
    dbcat.settings.APP_DIR = app_dir_path
    dbcat.settings.OUTPUT_FORMAT = output_format
def get_prefixes_file(ignore=False) -> Path:
    """Get the application configuration file.

    Prefers a ``prefixes.json`` in the current directory unless *ignore* is
    true; otherwise falls back to ``config.json`` in the typer app dir,
    creating it when missing.  An empty file is (re)initialized via
    ``create_file``.

    Returns:
        Path: the prefixes file to use.  (The annotation said ``Path`` but
        the function previously returned ``str``; it now honors the
        annotation.)
    """
    local_prefixes = Path('prefixes.json')
    if local_prefixes.exists() and not ignore:
        chosen = local_prefixes
    else:
        chosen = Path(typer.get_app_dir('riordinato')) / "config.json"
        if not chosen.exists():
            chosen.touch()
    # Empty file means it was never initialized — seed it.  st_size == 0 is
    # equivalent to the old "open and readlines() is empty" check, without
    # opening the file.
    if chosen.stat().st_size == 0:
        create_file(str(chosen))
    return chosen
def cli(
    log_level: str = typer.Option("WARNING", help="Logging Level"),
    output_format: OutputFormat = typer.Option(OutputFormat.tabular,
                                               case_sensitive=False),
    catalog_path: Path = typer.Option(
        None, help="Path to store catalog state. Use if NOT using a database"),
    catalog_host: str = typer.Option(
        None,
        help="hostname of Postgres database. Use if catalog is a database."),
    catalog_port: int = typer.Option(
        None, help="port of Postgres database. Use if catalog is a database."),
    catalog_user: str = typer.Option(
        None, help="user of Postgres database. Use if catalog is a database."),
    catalog_password: str = typer.Option(
        None,
        help="password of Postgres database. Use if catalog is a database."),
    catalog_database: str = typer.Option(
        None,
        help="database of Postgres database. Use if catalog is a database."),
    version: Optional[bool] = typer.Option(None,
                                           "--version",
                                           callback=version_callback),
):
    """Root typer callback: configure logging, ensure the "tokern" app dir
    exists, and stash the catalog connection options in the module-level
    ``app_state`` dict for the subcommands.
    """
    logging.config.dictConfig(log_config(log_level=log_level.upper()))
    app_dir = typer.get_app_dir("tokern")
    app_dir_path = Path(app_dir)
    app_dir_path.mkdir(parents=True, exist_ok=True)
    # Connection options are passed on via shared state, not globals.
    app_state["catalog_connection"] = {
        "path": catalog_path,
        "user": catalog_user,
        "password": catalog_password,
        "host": catalog_host,
        "port": catalog_port,
        "database": catalog_database,
        "app_dir": app_dir_path,
    }
    app_state["output_format"] = output_format
from colab_cli.cli_new import cli_new from colab_cli.cli_pull import cli_pull from colab_cli.cli_open import cli_open from shutil import copy from pathlib import Path import typer import json import glob import os from colab_cli.cli_push import cli_push from colab_cli.utilities.colab_metadata import get_colab_metadata from colab_cli.utilities.path_process import process_file_path APP_NAME = "colab-cli" app_dir = typer.get_app_dir(APP_NAME) app_dir = Path(app_dir) config_path = app_dir / 'config.json' app = typer.Typer() @app.callback() def callback(): """ Experience better workflow with google colab, local jupyter notebooks and git """ @app.command() def set_auth_user(user_no: str):
class Globals(BaseModel):
    """Application-wide settings shared across CLI commands."""

    # Per-user typer app directory for "cycl".
    config_dir = Path(typer.get_app_dir("cycl"))
    # Both default to the directory the CLI was launched from.
    project_dir = Path.cwd()
    proj_config = Path.cwd() / "cycl.yaml"
    # Known servers keyed by name; pydantic deep-copies this default per
    # instance, so the shared-mutable-default pitfall does not apply here.
    servers: Dict[str, Server] = {}
    proj = ProjectSettings()
""" import os import sys import re import json import traceback import requests import typer from halo import Halo import ARgorithmToolkit from ARgorithmToolkit.security import injection_check, execution_check from ARgorithmToolkit.parser import input_data, create, validateconfig, ValidationError CLOUD_URL = "https://argorithm.el.r.appspot.com" CACHE_DIR = typer.get_app_dir("ARgorithm") app = typer.Typer(help="ARgorithm CLI") class Messager(): """Class for pretty printing messages using typer.""" def msg(self, tag: str, title: str, message: str, color: str): """Pretty messaging for standard log messages.""" code = typer.style(f"[{tag.upper()}]: {title.upper()}", fg=color, bold=True) typer.echo(code) if message: typer.echo(message)
def get_default_logger_config():
    """Return the default logger TOML config path as a string."""
    app_dir = Path(typer.get_app_dir('tgl2rdm'))
    return str(app_dir / 'logger.toml')
# -*- coding: utf-8 -*- from pathlib import Path import typer APP_NAME = "wusa" WUSA_BASE_DIR = Path( typer.get_app_dir(APP_NAME, roaming=False, force_posix=True)) WUSA_BASE_DIR.mkdir(parents=True, exist_ok=True) WUSA_RUNNER_DIR = WUSA_BASE_DIR / "runners" WUSA_RUNNER_DIR.mkdir(parents=True, exist_ok=True)
import logging from pathlib import Path from typing import Optional from bleak import BleakError from idazen import core import typer package_name = "idazen" try: __version__ = metadata.version(package_name) except metadata.PackageNotFoundError: __version__ = "dev" cli = typer.Typer() app_dir = Path(typer.get_app_dir(package_name)) app_dir.mkdir(parents=True, exist_ok=True) config_path: Path = Path(app_dir) / "config.json" logging.debug(config_path) def read(): if not config_path.exists(): return {} with open(config_path, "r") as f: return json.load(f) def write(**kwargs): doc = read() doc.update(kwargs)
"""Набор настроек для проекта.""" import os from pathlib import Path import typer from time_logger import __project_name__ as application _environment_directory = os.getenv(application.upper() + "_DIRECTORY") if _environment_directory: APPLICATION_DATA = Path(_environment_directory).resolve() else: APPLICATION_DATA = Path(typer.get_app_dir(application)).resolve() DATABASE = APPLICATION_DATA / "database.json" TIME_FORMAT = "%H:%M" BACKUP_FORMAT = "%Y-%m-%d-%H-%M-%S-%f-database-backup.json"
def get_workunits(self) -> Iterable[WorkUnit]:
    """Yield one DataHub MCE work unit per table in the Tokern catalog.

    Opens the catalog from ``self.config``, iterates sources (optionally
    narrowed to ``source_names``) and the schema/table pairs matching the
    configured include/exclude regexes, and emits a ``SqlWorkUnit`` whose
    snapshot carries the table's schema metadata.
    """
    catalog = open_catalog(
        app_dir=Path(typer.get_app_dir("tokern")),
        secret=self.config.secret,
        path=Path(self.config.path) if self.config.path is not None else None,
        user=self.config.user,
        password=self.config.password,
        host=self.config.host,
        port=self.config.port,
        database=self.config.database,
    )
    with closing(catalog) as catalog:
        with catalog.managed_session:
            # Restrict to explicitly configured sources when any were named.
            if (self.config.source_names is not None
                    and len(self.config.source_names) > 0):
                sources = [
                    catalog.get_source(source_name)
                    for source_name in self.config.source_names
                ]
            else:
                sources = catalog.get_sources()
            for source in sources:
                for schema, table in table_generator(
                    catalog=catalog,
                    source=source,
                    include_schema_regex_str=self.config.include_schema_regex,
                    exclude_schema_regex_str=self.config.exclude_schema_regex,
                    include_table_regex_str=self.config.include_table_regex,
                    exclude_table_regex_str=self.config.exclude_table_regex,
                ):
                    # Dataset name optionally carries the source name prefix.
                    if self.config.include_source_name:
                        dataset_name = f"{source.name}.{schema.name}.{table.name}"
                    else:
                        dataset_name = f"{schema.name}.{table.name}"
                    self.report.report_entity_scanned(dataset_name)
                    dataset_urn = f"urn:li:dataset:(urn:li:dataPlatform:{source.source_type},{dataset_name},{self.config.env})"
                    dataset_snapshot = DatasetSnapshot(
                        urn=dataset_urn,
                        aspects=[],
                    )
                    schema_fields = []
                    for column in catalog.get_columns_for_table(table):
                        # Columns flagged as PII get the generic "pii" tag
                        # plus a tag for the concrete PII type.
                        global_tags: Optional[GlobalTagsClass] = None
                        if column.pii_type is not None:
                            global_tags = GlobalTagsClass(tags=[
                                TagAssociationClass("urn:li:tag:pii"),
                                TagAssociationClass(
                                    f"urn:li:tag:{column.pii_type.name.lower()}"
                                ),
                            ])
                        schema_fields.append(
                            SchemaField(
                                fieldPath=column.name,
                                type=CatalogSource.get_column_type(
                                    column.data_type),
                                nativeDataType=column.data_type,
                                description=None,
                                nullable=True,
                                recursive=False,
                                globalTags=global_tags,
                            ))
                    # NOTE(review): columns=[] while the real fields are
                    # passed as canonical_schema — presumably intentional for
                    # this helper; confirm against get_schema_metadata.
                    schema_metadata = get_schema_metadata(
                        sql_report=self.report,
                        dataset_name=dataset_name,
                        platform=source.source_type,
                        columns=[],
                        canonical_schema=schema_fields,
                    )
                    dataset_snapshot.aspects.append(schema_metadata)
                    mce = MetadataChangeEvent(
                        proposedSnapshot=dataset_snapshot)
                    wu = SqlWorkUnit(id=dataset_name, mce=mce)
                    self.report.report_workunit(wu)
                    yield wu
def path(cls) -> Path:
    """Location of the config file inside the "demo" typer app dir."""
    app_dir = Path(typer.get_app_dir("demo"))
    return app_dir / "config.json"
def __init__(self, mode: HandlerMode):
    """Bind this handler to the app's config file and remember the mode."""
    config_dir = pathlib.Path(typer.get_app_dir(APP_NAME))
    self.file_path = config_dir / "config.json"
    self.mode = mode
    # Opened lazily; no file handle is held until actually needed.
    self.file = None
# flake8: noqa import json import os import typer from pathlib import Path import sys _calling_script = Path(sys.argv[0]) if str(_calling_script) == "." and os.environ.get("TERM_PROGRAM") == "vscode": _calling_script = Path.cwd() / "cli.py" # vscode run in python shell if _calling_script.name == "cencli": base_dir = Path(typer.get_app_dir(__name__)) elif _calling_script.name.startswith("test_"): base_dir = _calling_script.parent.parent elif "centralcli" in Path(__file__).parts: base_dir = Path(__file__).parent while base_dir.name != "centralcli": base_dir = base_dir.parent base_dir = base_dir.parent else: base_dir = _calling_script.resolve().parent if base_dir.name == "centralcli": base_dir = base_dir.parent else: print("Warning Logic Error in git/pypi detection") print(f"base_dir Parts: {base_dir.parts}")
from names_transliteration.download import url_exists, download_from_url from names_transliteration.model.nmt import get_model from names_transliteration.model.save import ( load_keras_tokenizer_json, load_model_metadata, ) from names_transliteration.model.transliterate import ( transliterate, LettersNotInTokenizerException, ) logger = logging.getLogger(__name__) logger.setLevel("INFO") logger.addHandler(logging.StreamHandler()) MODEL_PATH = Path(typer.get_app_dir(APP_NAME)) / "model" # This is the main app app itself, which appears when the user selects "Run the app". def run_the_app(): # To make Streamlit fast, st.cache allows us to reuse computation across runs. # In this common pattern, we download data from an endpoint only once. @st.cache(show_spinner=False) def download_pre_trained_model(): if not url_exists(PRE_TRAINED_MODEL_URL): raise Exception( f"{PRE_TRAINED_MODEL_URL} does nost exist or is unreachable.") logger.info("Create temp dir to download model") temp_directory = Path(tempfile.gettempdir())
from pathlib import Path

import typer

# Per-user application directory for tasks-watcher state/config.
app_dir = Path(typer.get_app_dir("tasks-watcher"))
def main():
    """Report whether the application's config file exists yet."""
    config_file: Path = Path(typer.get_app_dir(APP_NAME)) / "config.json"
    if not config_file.is_file():
        typer.echo("Config file doesn't exist yet")
"""This module provides the RP To-Do config functionality.""" # rptodo/config.py # -*- coding: utf-8 -*- import configparser from pathlib import Path import typer from rptodo import (DB_WRITE_ERROR, DIR_ERROR, FILE_ERROR, SUCCESS, __app_name__) CONFIG_DIR_PATH = Path(typer.get_app_dir(__app_name__)) CONFIG_FILE_PATH = CONFIG_DIR_PATH / "config.ini" def init_app(db_path: str) -> int: """Initialize the application.""" config_code = _init_config_file() if config_code != SUCCESS: return config_code database_code = _create_database(db_path) if database_code != SUCCESS: return database_code return SUCCESS def _init_config_file() -> int: try: CONFIG_DIR_PATH.mkdir(exist_ok=True) except OSError:
import os from datetime import datetime from pathlib import Path from configparser import ConfigParser from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from contextlib import contextmanager from models import Base, Record import typer app = typer.Typer() APP_NAME = "microbiome" APP_DIR = typer.get_app_dir(APP_NAME) APP_CONFIG: Path = Path(APP_DIR) / "config.ini" config = ConfigParser() @contextmanager def session_scope(): config.read_file(open(APP_CONFIG)) DATABASE_URI = config.get("DATABASE", "uri") engine = create_engine(DATABASE_URI) Session = sessionmaker(bind=engine) session = Session() try: yield session session.commit()