def _create_secrets(ctx, dot_secret_path):
    if os.path.isfile(dot_secret_path):
        dot_secret = DotEnv(dotenv_path=dot_secret_path)
        secret_config = dot_secret.dict()
        for secret_name, secret_value in secret_config.items():
            ctx.run(
                f"echo {secret_value} | docker secret create {secret_name} -")
def load(self, cfile):
    cfile = Path(cfile).expanduser()
    if not cfile.is_file():
        return {}
    try:
        from dotenv.main import DotEnv
    except ImportError:  # pragma: no cover
        raise FormatNotSupported(".env, need python-dotenv.")
    # default_A = 1
    conf = DotEnv(cfile).dict()
    if not self.with_profile:
        return {key: Loader.type_cast(val) for key, val in conf.items()}
    ret = {}
    for key, val in conf.items():
        if "_" not in key:
            continue
        profile, realkey = key.split("_", 1)
        ret.setdefault(profile, {}).update({realkey: Loader.type_cast(val)})
    return ret
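A quick sketch of the profile grouping above, using a made-up dict in place of DotEnv(cfile).dict(); the key and value names are purely illustrative, and type casting is left out:

# Hypothetical values standing in for DotEnv(cfile).dict()
conf = {"default_host": "localhost", "default_port": "8080", "dev_debug": "true"}

ret = {}
for key, val in conf.items():
    if "_" not in key:
        continue
    # split on the first underscore only: "default_host" -> ("default", "host")
    profile, realkey = key.split("_", 1)
    ret.setdefault(profile, {})[realkey] = val

print(ret)
# {'default': {'host': 'localhost', 'port': '8080'}, 'dev': {'debug': 'true'}}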
def load_env():
    env_path = os.environ.get('ENV_PATH', './.env')
    print(f"* Env Path: {env_path}")
    dotenv = DotEnv(env_path, verbose=False)
    dotenv_dict = dotenv.dict()
    print("* Loaded .env")
    for k, v in dotenv_dict.items():
        print(f'** {k}={v}')
    dotenv.set_as_environment_variables(override=True)
def load_dotenv(dotenv_path=None, stream=None, verbose=False, override=False):
    f = dotenv_path or stream or find_dotenv(usecwd=True)
    return DotEnv(
        f, verbose=verbose).set_as_environment_variables(override=override)
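A hedged usage sketch for the wrapper above; the file name and variable name below are assumptions, not part of the original code:

import os

# With no arguments, find_dotenv(usecwd=True) walks up from the current
# working directory until it finds a ".env" file.
load_dotenv()

# Hypothetical alternate file; override=True lets it replace variables
# that are already set in os.environ.
load_dotenv(".env.local", override=True)

print(os.environ.get("DATABASE_URL"))  # DATABASE_URL is an illustrative key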
def get_processes(services) -> Iterable[Coroutine]:
    for name, service in services.items():
        env: dict = dict(os.environ)
        if "env_file" in service:
            env_file = service["env_file"]
            if not isinstance(env_file, list):
                env_file = [env_file]
            env.update(merge(*[DotEnv(path) for path in env_file]))
        if "environment" in service:
            environment = service["environment"]
            if isinstance(environment, list):
                env.update({b.key: b.value
                            for b in lmap(parse_binding, environment)})
            else:
                env.update(environment)
        # default both parts to "" so a missing key doesn't raise TypeError,
        # and strip the joining space before checking for an empty command
        cmd = service.get("entrypoint", "") + " " + service.get("command", "")
        if not cmd.strip():
            raise Exception("cannot run without commands on the config")
        build = service["build"]
        if isinstance(build, str):
            cwd = build
        else:
            cwd = build.get("context", ".")

        async def f(name, cmd, env, cwd):
            print("Attaching to " + name)
            color = random.choice(colors)
            log = lambda x: sys.stdout.write(
                getattr(Fore, color) + f"{name} | " + Fore.RESET + x)
            p = await exec(cmd, env=env, cwd=cwd, stdout=log, stderr=log)
            if p:
                log(f"{name} exited with code {p.returncode}" + "\n")

        yield f(name, cmd, env, cwd)
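For reference, a minimal shape of the services mapping the generator above expects; the service name, paths, and values are made up for illustration:

# Hypothetical docker-compose-style definition
services = {
    "web": {
        "build": {"context": "./web"},   # becomes the cwd for the spawned process
        "env_file": ".env",              # merged on top of os.environ
        "environment": ["DEBUG=1"],      # parsed as KEY=VALUE bindings
        "command": "python app.py",
    },
}
# get_processes(services) yields one coroutine per service; awaiting it runs
# "python app.py" in ./web with the merged environment and prefixed log output.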
def prepare(notebook_id: str, dev: bool = False):
    # Legacy. This will be removed in an upcoming release.

    # actual setup
    dot_env = DotEnv(find_dotenv())
    dot_env.set_as_environment_variables()

    # the rest displays information
    requirement_name_mapping = _get_requirement_name_mapping(dev=dev)
    notebook = _get_notebook(notebook_id, dev=dev)

    requirements = [
        requirement_name_mapping.get(req, req)
        for req in notebook.get("requirements", [])
    ]

    info = dedent(f"""
        ***Notebook Title***
        {notebook['name']}

        ***Notebook Description***
        {notebook['description']}
        """)

    if requirements:
        info += dedent("""
            ***Notebook Dependencies***
            This notebook requires an active subscription to:
            """)
        info += "".join(f"* {req}\n" for req in requirements)

    info += dedent("""
        ---------

        *API credentials have automatically been injected for your active subscriptions.*

        The following environment variables are now available:
        """)
    info += "".join(f"* `{k}`\n" for k in dot_env.dict().keys())
    info += "\n-------------\n"

    display(Markdown(info))
def setup_environment_variables():
    """Called in every notebook to inject credentials to environment"""
    dot_env = DotEnv(find_dotenv())
    dot_env.set_as_environment_variables()

    info = (
        "API credentials have automatically been injected for your active subscriptions. \n"
        + "The following environment variables are now available:\n"
        + _format_env_list(dot_env.dict().keys())
        + "\n")

    user_dot_env_path = "~/custom.env"
    user_dot_env = DotEnv(os.path.expanduser(user_dot_env_path))
    # NOTE: override is not True by default in dotenv
    user_dot_env.set_as_environment_variables()
    user_vars = user_dot_env.dict()
    if user_vars:
        info += (
            f"The following additional environment variables have been loaded from `{user_dot_env_path}`:\n"
            + _format_env_list(user_vars.keys()))

    display(Markdown(info))
def end_instance():
    USER_VARIABLES = DotEnv(find_dotenv()).dict()
    GCE_JSON_CERT = os.path.join(os.getcwd(), USER_VARIABLES['GCE_JSON_CERT'])
    GCE_PROJECT_NAME = USER_VARIABLES['GCE_PROJECT_NAME']
    GCE_ZONE = USER_VARIABLES['GCE_ZONE']
    GCE_USER = USER_VARIABLES['GCE_USER']

    credentials = service_account.Credentials.from_service_account_file(
        GCE_JSON_CERT)
    compute = googleapiclient.discovery.build('compute', 'v1',
                                              credentials=credentials)

    result = compute.instances().list(project=GCE_PROJECT_NAME,
                                      zone=GCE_ZONE).execute()
    instance_name = result['items'][0]['name']

    request_id = compute.instances().stop(project=GCE_PROJECT_NAME,
                                          zone=GCE_ZONE,
                                          instance=instance_name).execute()
    wait_for_operation(compute, GCE_PROJECT_NAME, GCE_ZONE, request_id['name'])

    result = compute.instances().list(project=GCE_PROJECT_NAME,
                                      zone=GCE_ZONE).execute()
def read(self) -> Dict[str, str]:
    return DotEnv(self.source, interpolate=False).dict()
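A note on interpolate=False, sketched with made-up file contents:

# Given a source file containing (illustrative values):
#   BASE_URL=https://example.com
#   API_URL=${BASE_URL}/api
#
# read() returns the raw values, leaving the ${BASE_URL} reference unexpanded:
#   {"BASE_URL": "https://example.com", "API_URL": "${BASE_URL}/api"}
#
# With python-dotenv's default interpolate=True, API_URL would instead come
# back as "https://example.com/api".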
docker run --rm -it -v $PWD/../src/config:/config -v $PWD:/working eurodatacube/jupyter-user:0.19.6 /opt/conda/envs/eurodatacube-0.19.6/bin/python3 /working/generate_files.py

If you run into write permission issues, you might have to add a user parameter
with the same user ID as your local account, e.g. "--user 1001"
"""
import os, shutil
import json
import csv
import datetime
import collections
import requests
from dotenv.main import find_dotenv, DotEnv
from xcube_geodb.core.geodb import GeoDBClient

dot_env = DotEnv("/working/.env")
dot_env.set_as_environment_variables()
geodb = GeoDBClient()
envs = dot_env.dict()

# Function to fetch all available dates for BYOD collections
# Make sure all appropriate collection ids are set in your docker environment
COLLECTIONS = [
    "N3_CUSTOM",
    "N3_CUSTOM_TSMNN",
    "E12C_NEW_MOTORWAY",
    "E12D_NEW_PRIMARYROADS",
    "ICEYE-E3",
    "ICEYE-E11",
    "ICEYE-E11A",
    "ICEYE-E12B",
def load_dotenv(file, env):
    from dotenv.main import DotEnv
    from dotenv.compat import to_env

    for key, value in DotEnv(file).dict().items():
        env[to_env(key)] = to_env(value)
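A hedged usage sketch for the helper above, loading a file into an isolated environment dict; the file name is illustrative, and dotenv.compat is only present in older python-dotenv releases:

import subprocess

child_env = {}
load_dotenv(".env", child_env)               # hypothetical file name
subprocess.run(["printenv"], env=child_env)  # the child sees only the .env values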
def wait_for_operation(compute, project, zone, operation):
    print('Waiting for operation to finish...')
    while True:
        result = compute.zoneOperations().get(project=project,
                                              zone=zone,
                                              operation=operation).execute()
        if result['status'] == 'DONE':
            print("done.")
            if 'error' in result:
                raise Exception(result['error'])
            return result
        time.sleep(1)


USER_VARIABLES = DotEnv(find_dotenv()).dict()
GCE_JSON_CERT = os.path.join(os.getcwd(), USER_VARIABLES['GCE_JSON_CERT'])
GCE_PROJECT_NAME = USER_VARIABLES['GCE_PROJECT_NAME']
GCE_ZONE = USER_VARIABLES['GCE_ZONE']
GCE_USER = USER_VARIABLES['GCE_USER']


def start_message(ip_address):
    print("Connect via")
    print('ssh gce')
    launch_jupyter = ' '.join(['ssh gce "jupyter notebook --port=8888"'])
    print("If you want to launch Jupyter:")
    print(launch_jupyter)
    print('''Connect to it:
import sys


def env_path():
    from pathlib import Path
    env_path = Path('..') / '.env'
    return env_path


if __name__ == '__main__':
    from dotenv.main import DotEnv

    env_path = env_path()
    print(f"* Env Path: {env_path}")
    dotenv = DotEnv(env_path, verbose=False)
    dotenv_dict = dotenv.dict()
    print("* Loaded .env")
    for k, v in dotenv_dict.items():
        print(f'** {k}={v}')
    dotenv.set_as_environment_variables(override=True)

    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?") from exc
    execute_from_command_line(sys.argv)
def _load_dotenv(self, path):
    if not path.exists():
        return
    _merge_env = DotEnv(dotenv_path=path, encoding="utf8").dict()
    app.env += _merge_env
def _remove_secrets(ctx, dot_secret_path):
    if os.path.isfile(dot_secret_path):
        dot_secret = DotEnv(dotenv_path=dot_secret_path)
        secret_config = dot_secret.dict()
        for secret_name in secret_config:
            ctx.run(f"docker secret rm {secret_name}")
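Taken together with _create_secrets above, a sketch of the round trip, assuming a hypothetical .secret file:

# .secret (illustrative contents):
#   db_password=s3cr3t
#   api_token=abc123
#
# _create_secrets(ctx, ".secret") runs, via ctx.run:
#   echo s3cr3t | docker secret create db_password -
#   echo abc123 | docker secret create api_token -
#
# _remove_secrets(ctx, ".secret") later cleans them up:
#   docker secret rm db_password
#   docker secret rm api_token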