import os
from environs import Env

# Absolute directory of this settings module; handy for building file paths.
basedir = os.path.abspath(os.path.dirname(__file__))

# Load variables from a local ".env" file into the environment.
# recurse=False: look only in the current directory, not parent directories.
env = Env()
env.read_env(".env", recurse=False)


class Config:
    """Base configuration shared by every environment."""
    # DEBUG defaults to off so production is safe unless explicitly enabled.
    DEBUG = env.bool("DEBUG", default=False)
    # Fallback secret is for local development only; set SECRET_KEY in prod.
    SECRET_KEY = env.str("SECRET_KEY", default="SecretKey")


class DevelopmentConfig(Config):
    """Development configuration: debug on, local Postgres by default."""
    DEBUG = True
    # Placeholder credentials ("*****") are expected to be overridden by the
    # DATABASE_URL environment variable.
    DATABASE_URL = env.str(
        "DATABASE_URL",
        default="postgres://*****:*****@127.0.0.1:5432/expense_tracker")
    # Disable the SQLAlchemy event system to avoid its overhead.
    SQLALCHEMY_TRACK_MODIFICATIONS = False


class TestingConfig(Config):
    """Testing configuration: debug and TESTING flags enabled."""
    DEBUG = True
    TESTING = True
    DATABASE_URL = env.str(
        "DATABASE_URL",
        default="postgres://*****:*****@127.0.0.1:5432/expense_tracker")
    PRESERVE_CONTEXT_ON_EXCEPTION = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False
# https://12factor.net and https://12factor.net/config
#
# Note about PyLint static code analyzer: items disabled are false positives.
#
##
# pylint: disable=too-few-public-methods;
# In order to avoid false positives with Flask

import sys
from os import environ, path

from environs import Env

# Absolute path of the .env file expected to live next to this module.
ENV_FILE = path.join(path.abspath(path.dirname(__file__)), '.env')

if path.exists(ENV_FILE):
    ENVIR = Env()
    ENVIR.read_env()
else:
    # Fail fast when configuration is missing (12-factor: config via env).
    print('Error: .env file not found')
    # Fixed: use sys.exit(1) instead of exit(code=1). The exit() builtin is
    # an interactive helper installed by the site module and may not exist
    # under `python -S` or in frozen applications; sys.exit raises
    # SystemExit reliably with the same exit status.
    sys.exit(1)


class Config:
    """ This is the generic loader that sets common attributes """
    # Preserve insertion order of keys in Flask JSON responses.
    JSON_SORT_KEYS = False
    DEBUG = True
    TESTING = True


class Development(Config):
    """ Development loader """
from subprocess import Popen import discord from discord.ext import commands import logging import os from random import randint from Globals import prefix try: discord_api = os.environ['discord'] my_user_id = 0 except KeyError: from environs import Env env = Env() # reads from .env file env.read_env() my_user_id = int(os.environ['my_user_id']) discord_api = os.environ['discord'] logger = logging.getLogger('discord') logger.setLevel(logging.DEBUG) handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w') handler.setFormatter( logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) logger.addHandler(handler) bot = commands.Bot(command_prefix=prefix) bot.remove_command('help') def format_cogs(cogs_dir='cogs'):
""" Django settings for config project. Generated by 'django-admin startproject' using Django 3.1.4. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ from pathlib import Path from environs import Env # new env = Env() # new env.read_env() # new # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = env.str("SECRET_KEY") # SECURITY WARNING: don't run with debug turned on in production! DEBUG = env.bool("DEBUG", default=False) ALLOWED_HOSTS = ['.herokuapp.com', 'localhost', '127.0.0.1']
def buildURI_Injected_StaticKVsecrets():
    """Populate ``Database`` credentials from injected static KV secrets.

    Loads the .env-style file mounted at /app/secrets/.envapp into the
    process environment and copies DB_USER / DB_PASSWORD onto the
    ``Database`` class attributes.
    """
    secrets_env = Env()
    # Pull the mounted secrets file into os.environ before reading values.
    secrets_env.read_env('/app/secrets/.envapp')
    Database.USER = secrets_env('DB_USER')
    Database.PASSWORD = secrets_env('DB_PASSWORD')
class Database(object):
    """Static facade over a MongoDB connection whose credentials come either
    from injected static KV secrets or from dynamic Vault-issued secrets.
    """

    # When True, use the 'webblogencrypted' database instead of 'webblog'.
    ENCRYPT = True
    env = Env()
    # Read .env into os.environ
    env.read_env()
    # SERVER = env('DB_SERVER')
    SERVER = os.environ['DB_SERVER']
    print(f'This is the DB Server: {SERVER}')
    # PORT = env('DB_PORT')
    PORT = os.environ['DB_PORT']
    VAULT_URL = env('VAULT_URL')
    # Uncomment USER and PASSWORD below to grab creds from .env file
    # USER = env('DB_USER')
    # PASSWORD = env('DB_PASSWORD')
    # Uncomment USER and PASSWORD below to show Vault's functionality
    USER = None
    PASSWORD = None
    URI = ''
    DATABASE = None

    @staticmethod
    def getDynamicSecret_API(vault_token):
        """Fetch a short-lived username/password pair from Vault's HTTP API
        and store it on the class for later URI construction."""
        response = requests.get(
            f'{Database.VAULT_URL}/v1/mongodb_nomad/creds/mongodb-nomad-role',
            params={'q': 'requests+language:python'},
            headers={'X-Vault-Token': vault_token},
        )
        json_response = response.json()
        # NOTE(review): this prints the full secret payload to stdout, which
        # leaks credentials into logs -- presumably intentional for a demo.
        print(f'response is:')
        print(json_response)
        Database.USER = json_response['data']['username']
        Database.PASSWORD = json_response['data']['password']

    @staticmethod
    def buildURI_Injected_StaticKVsecrets():
        """Load static DB credentials from the K8s-injected secrets file."""
        env = Env()
        # Read .env into os.environ
        env.read_env('/app/secrets/.envapp')
        Database.USER = env('DB_USER')
        Database.PASSWORD = env('DB_PASSWORD')

    @staticmethod
    def buildURI_Injected_DynamicSecrets():
        """Read the injected Vault token from disk, then exchange it for
        dynamic DB credentials via the Vault API."""
        with open("/secrets/vault_token") as f:
            VAULT_TOKEN = f.readlines()
            # Only the first line matters; strip its trailing newline.
            VAULT_TOKEN = VAULT_TOKEN[0].strip('\n')
        print(f'Vault token = {VAULT_TOKEN}')
        Database.getDynamicSecret_API(VAULT_TOKEN)

    @staticmethod
    def initialize():
        """Build the MongoDB URI from current credentials and (re)connect."""
        # Uncomment the 2 lines below to show Vault grabbing static secrets that were injected by the K8s injector
        # print('Initializing Database using Static Injected Secrets from Vault')
        # Database.buildURI_Injected_StaticKVsecrets()
        # Uncomment the 2 lines below to show Vault grabbing Dynamic secrets by utilizing an injected Vault token by the K8s injector
        print('Initializing Database using Dynamic Secrets from Vault')
        Database.buildURI_Injected_DynamicSecrets()
        Database.URI = f'mongodb://{Database.USER}:{Database.PASSWORD}@{Database.SERVER}:{Database.PORT}'
        print(f'Server: {Database.SERVER} and PORT: {Database.PORT} and user: {Database.USER} and password: {Database.PASSWORD}')
        client = pymongo.MongoClient(Database.URI)
        if Database.ENCRYPT:
            Database.DATABASE = client['webblogencrypted']
        else:
            Database.DATABASE = client['webblog']

    @staticmethod
    def insert(collection, data):
        # NOTE(review): Collection.insert was deprecated in PyMongo 3 and
        # removed in PyMongo 4 -- confirm the pinned pymongo version, or
        # migrate to insert_one/insert_many.
        Database.DATABASE[collection].insert(data)

    @staticmethod
    def find(collection, query):
        """Return a cursor over all documents matching ``query``."""
        return Database.DATABASE[collection].find(query)

    @staticmethod
    def find_one(collection, query):
        """Return one matching document; on auth failure (expired dynamic
        creds) re-initialize the connection and retry once."""
        try:
            return Database.DATABASE[collection].find_one(query)
        except pymongo.errors.OperationFailure:
            print(f'mongoDB auth failed due to creds expiring. Rotating creds now')
            Database.initialize()
            return Database.DATABASE[collection].find_one(query)
async def showdown():
    """Run the Pokemon Showdown bot: read configuration from the
    environment, connect to the server, then play battles in a loop until
    RUN_COUNT battles have completed."""
    # Overlay environment variables onto the existing config defaults.
    env = Env()
    env.read_env()
    config.log_to_file = env.bool("LOG_TO_FILE", config.log_to_file)
    config.save_replay = env.bool("SAVE_REPLAY", config.save_replay)
    config.decision_method = env("DECISION_METHOD", config.decision_method)
    config.use_relative_weights = env.bool("USE_RELATIVE_WEIGHTS", config.use_relative_weights)
    config.gambit_exe_path = env("GAMBIT_PATH", config.gambit_exe_path)
    config.search_depth = int(env("MAX_SEARCH_DEPTH", config.search_depth))
    config.greeting_message = env("GREETING_MESSAGE", config.greeting_message)
    config.battle_ending_message = env("BATTLE_OVER_MESSAGE", config.battle_ending_message)
    logger.setLevel(env("LOG_LEVEL", "DEBUG"))
    websocket_uri = env("WEBSOCKET_URI", "sim.smogon.com:8000")
    username = env("PS_USERNAME")  # required: no default
    password = env("PS_PASSWORD", "")
    bot_mode = env("BOT_MODE")  # required: no default
    team_name = env("TEAM_NAME", None)
    pokemon_mode = env("POKEMON_MODE", constants.DEFAULT_MODE)
    run_count = int(env("RUN_COUNT", 1))

    # Apply mode-specific game-data mods, then snapshot the data so any
    # accidental mutation during battles can be detected afterwards.
    apply_mods(pokemon_mode)
    original_pokedex = deepcopy(pokedex)
    original_move_json = deepcopy(all_move_json)

    ps_websocket_client = await PSWebsocketClient.create(
        username, password, websocket_uri)
    await ps_websocket_client.login()

    team = load_team(team_name)
    battles_run = 0
    wins = 0
    losses = 0
    while True:
        # Start the next battle according to the configured mode.
        if bot_mode == constants.CHALLENGE_USER:
            user_to_challenge = env("USER_TO_CHALLENGE")
            await ps_websocket_client.challenge_user(user_to_challenge, pokemon_mode, team)
        elif bot_mode == constants.ACCEPT_CHALLENGE:
            await ps_websocket_client.accept_challenge(pokemon_mode, team)
        elif bot_mode == constants.SEARCH_LADDER:
            await ps_websocket_client.search_for_match(pokemon_mode, team)
        else:
            raise ValueError("Invalid Bot Mode")
        winner = await pokemon_battle(ps_websocket_client, pokemon_mode)
        if winner == username:
            wins += 1
        else:
            losses += 1
        logger.info("\nW: {}\nL: {}\n".format(wins, losses))

        # Game data must not change between battles; if it did, dump the
        # mutated copy for debugging and abort the whole process.
        # NOTE(review): exit() is the site-module interactive helper --
        # sys.exit(1) would be the robust choice here.
        if original_move_json != all_move_json:
            logger.critical(
                "Move JSON changed!\nDumping modified version to `modified_moves.json`"
            )
            with open("modified_moves.json", 'w') as f:
                json.dump(all_move_json, f, indent=4)
            exit(1)
        else:
            logger.debug("Move JSON unmodified!")
        if original_pokedex != pokedex:
            logger.critical(
                "Pokedex JSON changed!\nDumping modified version to `modified_pokedex.json`"
            )
            with open("modified_pokedex.json", 'w') as f:
                json.dump(pokedex, f, indent=4)
            exit(1)
        else:
            logger.debug("Pokedex JSON unmodified!")
        battles_run += 1
        if battles_run >= run_count:
            break
from environs import Env

# Reader for MongoDB connection settings. recurse=False restricts the
# lookup to the ".database.env" file in the current directory only.
env_db: Env = Env()
env_db.read_env(".database.env", recurse=False)

# Both values are required -- environs raises an error when either is unset.
MONGODB_CONNECTION_URI: str = env_db.str("MONGODB_CONNECTION_URI")
MONGODB_DATABASE_NAME: str = env_db.str("MONGODB_DATABASE_NAME")
HOST_MYSQL = None USER_MYSQL = None PASS_MYSQL = None PORT_MYSQL = None NAME_BD_MYSQL = None NAME_FILE_DUMP_SQL_BD = None ADRESSE_SRV_FLASK = None DEBUG_FLASK = None PORT_FLASK = None SECRET_KEY_FLASK = None WTF_CSRF_ENABLED = True try: obj_env = Env() obj_env.read_env() HOST_MYSQL = obj_env("HOST_MYSQL") USER_MYSQL = obj_env("USER_MYSQL") PASS_MYSQL = obj_env("PASS_MYSQL") PORT_MYSQL = int( obj_env("PORT_MYSQL") ) # Pour la connection à la BD le port doit être une valeur numérique INT NAME_BD_MYSQL = obj_env("NAME_BD_MYSQL") NAME_FILE_DUMP_SQL_BD = obj_env("NAME_FILE_DUMP_SQL_BD") ADRESSE_SRV_FLASK = obj_env("ADRESSE_SRV_FLASK") DEBUG_FLASK = obj_env("DEBUG_FLASK") PORT_FLASK = obj_env("PORT_FLASK") SECRET_KEY_FLASK = obj_env("SECRET_KEY_FLASK")
async def create_new_role(request):
    """Create a new role.

    Validates the request body, rejects duplicate role names, submits the
    role-creation batch to Sawtooth, and (unless the role is the local
    NextAdmins group) queues an outbound entry for provider sync.
    """
    required_fields = ["name", "administrators", "owners"]
    utils.validate_fields(required_fields, request.json)
    # Collapse runs of whitespace in the submitted name to single spaces.
    role_title = " ".join(request.json.get("name").split())
    # Truthy when a role with this title already exists.
    response = await roles_query.roles_search_duplicate(
        request.app.config.DB_CONN, role_title
    )
    # Normalize missing/empty metadata to a dict, then mark the record as
    # originating locally so sync consumers push it outbound.
    if request.json.get("metadata") is None or request.json.get("metadata") == {}:
        set_metadata = {}
    else:
        set_metadata = request.json.get("metadata")
    set_metadata["sync_direction"] = "OUTBOUND"
    if not response:
        txn_key, txn_user_id = await utils.get_transactor_key(request)
        role_id = str(uuid4())
        batch_list = Role().batch_list(
            signer_keypair=txn_key,
            signer_user_id=txn_user_id,
            name=role_title,
            role_id=role_id,
            metadata=set_metadata,
            admins=request.json.get("administrators"),
            owners=request.json.get("owners"),
            description=request.json.get("description"),
        )
        sawtooth_response = await utils.send(
            request.app.config.VAL_CONN, batch_list, request.app.config.TIMEOUT
        )
        if not sawtooth_response:
            LOGGER.warning("There was an error submitting the sawtooth transaction.")
            return await handle_errors(
                request,
                ApiInternalError(
                    "There was an error submitting the sawtooth transaction"
                ),
            )
        if role_title != "NextAdmins":
            # Build the LDAP-style record that remote providers expect.
            distinguished_name_formatted = "CN=" + role_title + "," + GROUP_BASE_DN
            data_formatted = {
                "created_date": r.now(),
                "distinguished_name": distinguished_name_formatted,
                # -2147483646: global security group flag value
                "group_types": -2147483646,
                "group_nickname": role_title,
                "name": role_title,
                "remote_id": distinguished_name_formatted,
            }
            # Pick the sync provider from the environment: LDAP wins over
            # Azure; otherwise the role is marked as locally created.
            env = Env()
            if env.int("ENABLE_LDAP_SYNC", 0):
                provider = env("LDAP_DC")
            elif env.int("ENABLE_AZURE_SYNC", 0):
                provider = env("TENANT_ID")
            else:
                provider = "NEXT-created"
            outbound_entry = {
                "data": data_formatted,
                "data_type": "group",
                "timestamp": r.now(),
                "provider_id": provider,
                "status": "UNCONFIRMED",
            }
            # Insert to outbound_queue and close
            await roles_query.insert_to_outboundqueue(
                request.app.config.DB_CONN, outbound_entry
            )
        else:
            LOGGER.info(
                "The role being created is NextAdmins, which is local to NEXT and will not be inserted into the outbound_queue."
            )
        return create_role_response(request, role_id)
    # Duplicate role name: report a conflict to the caller.
    return await handle_errors(
        request,
        ApiTargetConflict(
            "Error: Could not create this role because the role name already exists."
        ),
    )
Generated by 'django-admin startproject' using Django 3.1.7. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ from pathlib import Path from environs import Env # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent ENV = Env() ENV.read_env() # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = ENV.str('SECRET_KEY') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition
def default(cls): """Ignore environmental variables and use all default arguments as defined above""" # Put empty env to force using the given defaults cls.env = Env() cls.__init__(cls)
async def delete_role(request, role_id):
    """Delete a role by its next_id.
    Args:
        role_id:
            str: the role_id field of the targeted role
    Returns:
        json:
            dict: {
                message:
                    str: the status of the role delete operation
                deleted:
                    int: count of the number of roles that were deleted
            }
    Raises:
        ApiForbidden: The user is not a system admin or owner of the
            targeted role.
        ApiNotFound: The role does not exist in RethinkDB.
        ApiInternalError: There was an error compiling blockchain
            transactions.
    """
    log_request(request)
    # Feature flag: role deletion only works when NEXT base use is enabled.
    env = Env()
    if not env.int("ENABLE_NEXT_BASE_USE"):
        raise ApiDisabled("Not a valid action. Source not enabled")
    txn_key, txn_user_id = await get_transactor_key(request)
    # does the role exist?
    conn = await create_connection()
    if not await roles_query.does_role_exist(conn, role_id):
        LOGGER.warning(
            "Nonexistent Role – User %s is attempting to delete the nonexistent role %s",
            txn_user_id,
            role_id,
        )
        return await handle_not_found(
            request, ApiNotFound("The targeted role does not exist.")
        )
    conn.close()
    # Authorization: the caller must be a role owner or a system admin.
    is_role_owner = await check_role_owner_status(txn_user_id, role_id)
    if not is_role_owner:
        is_admin = await check_admin_status(txn_user_id)
        if not is_admin:
            LOGGER.warning(
                "Permission Denied – User %s does not have sufficient privilege to delete role %s.",
                txn_user_id,
                role_id,
            )
            return await handle_errors(
                request, ApiForbidden("You do not have permission to delete this role.")
            )
    # Accumulate the full transaction chain: reject pending proposals, then
    # delete admins, members, owners, and finally the role itself.
    txn_list = []
    txn_list = await create_rjct_ppsls_role_txns(
        txn_key, role_id, txn_user_id, txn_list
    )
    txn_list = await create_del_admin_by_role_txns(txn_key, role_id, txn_list)
    txn_list = await create_del_mmbr_by_role_txns(txn_key, role_id, txn_list)
    txn_list = await create_del_ownr_by_role_txns(txn_key, role_id, txn_list)
    txn_list = create_del_role_txns(txn_key, role_id, txn_list)
    # validate transaction list
    if not txn_list:
        LOGGER.warning(
            "txn_list is empty. There was an error processing the delete role transactions. Transaction list: %s",
            txn_list,
        )
        return await handle_errors(
            request,
            ApiInternalError(
                "An error occurred while creating the blockchain transactions to delete the role."
            ),
        )
    # Batch the transactions and submit them to the validator.
    batch = batcher.make_batch_from_txns(transactions=txn_list, signer_keypair=txn_key)
    batch_list = batcher.batch_to_list(batch=batch)
    await send(request.app.config.VAL_CONN, batch_list, request.app.config.TIMEOUT)
    return json(
        {"message": "Role {} successfully deleted".format(role_id), "deleted": 1}
    )
def parse_config() -> DotDict:
    """
    Parse configuration parameters from environment variables.

    Makes type validation.

    Raises:
        environs.EnvValidationError: if parsed data does not conform expected type.
    """
    env = Env()
    env.read_env()

    # Kafka consumer settings.
    kafka_section = {
        "kafka_broker_list": env.str("KAPG_BROKER_LIST", "localhost:9092,"),
        "kafka_topic": env.str("KAPG_KAFKA_TOPIC", "pagemonitor_metrics"),
        # Client group id string. All clients sharing the same group.id
        # belong to the same group.
        "consumer_group.id": env.str("KAPG_GROUP_ID", "42"),
        # Action to take when there is no initial offset in the offset store
        # or the desired offset is out of range.
        "consumer_auto.offset.reset": env.str("KAPG_AUTOOFFSETRESET", "earliest"),
        "consumer_sleep_interval": env.float("KAPG_CONSUMER_SLEEP", 2.0),
        "kafka_enable_cert_auth": env.bool("KAPG_KAFKA_ENABLE_CERT_AUTH", False),
        # The SSL paths below are used only when cert authentication is on.
        "kafka_ssl_ca": env.path("KAPG_KAFKA_SSL_CA", "/etc/kapg/ssl/kafka/ca.pem"),
        "kafka_ssl_cert": env.path("KAPG_KAFKA_SSL_CERT", "/etc/kapg/ssl/kafka/service.cert"),
        "kafka_ssl_key": env.path("KAPG_KAFKA_SSL_KEY", "/etc/kapg/ssl/kafka/service.key"),
    }

    # PostgreSQL connection settings.
    pg_section = {
        "pg_host": env.str("KAPG_PG_HOST", "localhost"),
        "pg_port": env.int("KAPG_PG_PORT", 5432),
        "pg_user": env.str("KAPG_PG_USER", "postgres"),
        "pg_password": env.str("KAPG_PG_PWD", "changeme"),
        "pg_db_name": env.str("KAPG_PG_DB_NAME", "metrics"),
        "pg_table_name": env.str("KAPG_PG_TABLE_NAME", "pagemonitor"),
        "pg_conn_timeout": env.float("KAPG_PG_CONN_TIMEOUT", 10.0),
        "pg_command_timeout": env.float("KAPG_PG_COMMAND_TIMEOUT", 10.0),
        # SSL config
        "pg_enable_ssl": env.bool("KAPG_PG_ENABLE_SSL", False),
        "pg_ssl_ca": env.path("KAPG_PG_SSL_CA", "/etc/kapg/ssl/postgres/ca.pem"),
    }

    return DotDict({**kafka_section, **pg_section})
import os import pytest from environs import Env from utilities.api_helpers.api import API sut_env = Env() AGENCY: str = sut_env.str('AGENCY') @pytest.mark.api @pytest.mark.unit class TestApiHelper: """Battery of tests for API helper functionality.""" @pytest.fixture(autouse=True) def set_api(self) -> None: """Instantiate all APIs for API helper testing.""" self.api: API = API() @pytest.mark.low def test_build_api_url__localhost(self) -> None: """Check that the build_api_url handles localhost environments.""" os.environ['ENV'] = 'localhost' url: str = self.api.build_api_url(path='/testing') assert 'localhost' in url @pytest.mark.low def test_build_api_url__stage(self) -> None: """Check that the build_api_url handles stage environments.""" os.environ['ENV'] = 'stage'