Example #1
def create_app(no_db: bool = False):
    env = Env()
    env.read_env()

    logger = logging.getLogger(__name__)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    app = Flask(__name__)
    app.register_blueprint(core, url_prefix='/api')

    with env.prefixed('WOL_'):
        logger.setLevel(env.log_level('LOG_LEVEL', logging.DEBUG))
        if not env('NO_DB', False) and not no_db and models:  # TODO: simplify this database toggle
            app.config['DATABASE'] = env.str('DATABASE_URL', 'postgres://postgres@localhost:5432/wol')
            models.db.init_app(app)
            app.register_blueprint(crud, url_prefix='/api')
            app.register_blueprint(pages)

    @app.errorhandler(ValidationError)
    def handle_validation(error: ValidationError):
        response = jsonify(error.messages)
        response.status_code = 400
        return response

    @app.errorhandler(NotImplementedError)
    def handle_not_implemented(_error: NotImplementedError):
        return Response(status=501)

    return app
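A minimal sketch of how a factory like this might be wired to a WSGI entrypoint. The module path `app` and the run arguments below are assumptions, not taken from the original project:

# wsgi.py -- hypothetical entrypoint; `from app import create_app` assumes the factory's module name
from app import create_app

application = create_app()

if __name__ == '__main__':
    # WOL_LOG_LEVEL and WOL_DATABASE_URL are read inside create_app()
    application.run(port=8000)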
Example #2
async def reload_logging_config():
    env = Env()
    env.read_env(override=True)
    log_level = env.log_level('LOG_LEVEL', logging.INFO)
    logging.root.setLevel(log_level)
    converter_wrapper.debug = log_level == logging.DEBUG
    return {'log_level': logging.getLevelName(logging.root.level)}
Example #3
if DEBUG:
    # Use the default static storage backend for debug purposes.
    STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
else:
    # In production, use a manifest to encourage aggressive caching.
    # Note: this requires running `manage.py collectstatic`!
    STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

# Logging

log_level = env.log_level("LOG_LEVEL", default="INFO")
query_log_level = env.log_level("QUERY_LOG_LEVEL", default="INFO")

# To debug what the *actual* config ends up being, use the logging_tree package
# See https://stackoverflow.com/a/53058203/14558
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {
            # The handler should print anything that reaches it, so that
            # debugging can be enabled for specific loggers without also
            # turning on debug logging for all of Django/Python
            "level": "NOTSET",
            "class": "logging.StreamHandler",
        },
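The LOGGING dict above is cut off by the excerpt. A minimal, self-contained sketch of how it typically completes, reusing the two levels read earlier; apart from the standard `django.db.backends` logger name, the wiring shown here is an assumption:

# Sketch only: a complete minimal LOGGING config in the same spirit as the excerpt above
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {"level": "NOTSET", "class": "logging.StreamHandler"},
    },
    "root": {"handlers": ["console"], "level": log_level},
    "loggers": {
        # SQL query logging is controlled separately from everything else
        "django.db.backends": {"level": query_log_level, "propagate": True},
    },
}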
Example #4
import logging
import sys

from environs import Env

from iolite_client.client import Client
from iolite_client.entity import RadiatorValve
from iolite_client.oauth_handler import OAuthHandler, OAuthStorage, OAuthWrapper

env = Env()
env.read_env()

USERNAME = env("HTTP_USERNAME")
PASSWORD = env("HTTP_PASSWORD")
CODE = env("CODE")
NAME = env("NAME")
LOG_LEVEL = env.log_level("LOG_LEVEL", logging.INFO)

logging.basicConfig(level=logging.getLevelName(LOG_LEVEL))
logger = logging.getLogger(__name__)

# Get SID
oauth_storage = OAuthStorage(".")
oauth_handler = OAuthHandler(USERNAME, PASSWORD)
oauth_wrapper = OAuthWrapper(oauth_handler, oauth_storage)
sid = oauth_wrapper.get_sid(CODE, NAME)

print("------------------")
print(f"URL: https://remote.iolite.de/ui/?SID={sid}")
print(f"User: {USERNAME}")
print(f"Pass: {PASSWORD}")
print("------------------")
Example #5
from environs import Env

env = Env()
env.read_env()

API_CONFIG = {
    'development_host': env.str('DEVELOPMENT_HOST', '0.0.0.0'),
    'development_port': env.int('DEVELOPMENT_PORT', 21001),
    'pos_url': env.str('POS_URL'),
    'ner_url': env.str('NER_URL'),
    'onmt_server_host': env.str('ONMT_SERVER_HOST', '0.0.0.0'),
    'onmt_server_port': env.int('ONMT_SERVER_PORT', 21000),
    'onmt_server_url_root': env.str('ONMT_SERVER_URL_ROOT', '/question-generator'),
    'onmt_server_config': env.str('ONMT_SERVER_CONFIG'),
    'random_answer_entity_chance': env.float('RANDOM_ANSWER_ENTITY_CHANCE'),
    'log_level': env.log_level('LOG_LEVEL', 'INFO'),
}
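How the parsed values might then be consumed; the logging call is the only use implied by the config itself, and the startup message is an assumption:

import logging

logging.basicConfig(level=API_CONFIG['log_level'])
logging.getLogger(__name__).info(
    'Dev server configured for %s:%s',
    API_CONFIG['development_host'],
    API_CONFIG['development_port'],
)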
Example #6
import os

from environs import Env

env = Env()
env.read_env()

SYNONYMS_SEARCH_URL = 'https://rusvectores.org/tayga_upos_skipgram_300_2_2019/' \
                      '{word}/api/json/'
CORPUS_EXAMPLES_MARKER = lambda ex: f"<b>{ex.upper()}</b>"  # noqa

with env.prefixed('API_'):
    API_VERSION = env('VERSION', '0.1.0')
    API_HOST = env('HOST')
    API_PORT = env.int('PORT')
    API_DEBUG = env.bool('DEBUG', False)

with env.prefixed('LOGGER_'):
    LOGGER_NAME = env('NAME', 'Vocabulary')
    LOGGER_LEVEL = env.log_level('LEVEL', 'debug')

with env.prefixed('DB_'):
    DB_DSN_TEMPLATE = "postgresql+{driver}://{username}:{password}@" \
                      "{host}:{port}/{name}"

    DB_HOST = env('HOST')
    DB_PORT = env.int('PORT')
    DB_USERNAME = env('USERNAME')
    DB_PASSWORD = env('PASSWORD')
    DB_NAME = env('NAME')
    DB_ISOLATION_LEVEL = env('ISOLATION_LEVEL', 'REPEATABLE READ')

os.environ.clear()
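DB_DSN_TEMPLATE is defined above but never filled in within this excerpt; a minimal sketch of how it would typically be formatted (the `asyncpg` driver name is an assumption):

DB_URI = DB_DSN_TEMPLATE.format(
    driver='asyncpg',  # assumption: any SQLAlchemy driver name fits here
    username=DB_USERNAME,
    password=DB_PASSWORD,
    host=DB_HOST,
    port=DB_PORT,
    name=DB_NAME,
)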
Example #7
import logging
import os
import sys

from environs import Env

import api as api_module

env = Env()
env.read_env()

LOG_LEVEL = env.log_level('LOG_LEVEL', logging.INFO)
LOG_API_DIR = 'log'
ROOT_MODULE_NAME = 'liturgi_format_converter'
API_MODULE_NAME = f'{ROOT_MODULE_NAME}.{api_module.__name__}'

logging.basicConfig(level=LOG_LEVEL, stream=sys.stdout)
logging.debug(f'ROOT_MODULE_NAME is: {ROOT_MODULE_NAME}')
logging.debug(f'API_LOGGER_NAME is: {API_MODULE_NAME}')

os.makedirs(LOG_API_DIR, exist_ok=True)
LOGGING_CONFIG = dict(
    version=1,
    disable_existing_loggers=False,
    root={
        'level': LOG_LEVEL,
        'handlers': []
    },
    loggers={
        ROOT_MODULE_NAME: {
            'handlers': ['console_handler', 'time_rotating_file_handler_root']
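The excerpt stops before the handler definitions. A sketch of how such a config is often completed and applied, assuming the truncated dict above is closed off; only the handler names and LOG_API_DIR come from the excerpt, the handler settings themselves are assumptions:

import logging.config

LOGGING_CONFIG['handlers'] = {
    'console_handler': {
        'class': 'logging.StreamHandler',
        'level': LOG_LEVEL,
        'stream': 'ext://sys.stdout',
    },
    'time_rotating_file_handler_root': {
        'class': 'logging.handlers.TimedRotatingFileHandler',
        'level': LOG_LEVEL,
        'filename': f'{LOG_API_DIR}/root.log',
        'when': 'midnight',
    },
}
logging.config.dictConfig(LOGGING_CONFIG)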
Example #8
import os

from environs import Env

# Load environment variables.
env = Env()

# Paths.
HERE = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.join(HERE, os.pardir)

# General Flask settings.
ENV = env.str("FLASK_ENV", "production")
DEBUG = ENV == "development"
LOG_LEVEL = env.log_level("LOG_LEVEL", "INFO")
SECRET_KEY = env.str("SECRET_KEY")

# DB access / ORM.
SQLALCHEMY_DATABASE_URI = env.str("DATABASE_URL")
SQLALCHEMY_TRACK_MODIFICATIONS = False

# API spec/docs hosting.
API_TITLE = "Flask Application Template"
API_VERSION = "v1"
OPENAPI_VERSION = "3.0.2"
OPENAPI_URL_PREFIX = "/smorest"
OPENAPI_SWAGGER_UI_PATH = "/docs"
OPENAPI_SWAGGER_UI_VERSION = "3.24.2"

# Background tasks.
DRAMATIQ_BROKER_URL = env.str("BROKER_URL", "redis://localhost:6379/3")
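A sketch of how a settings module like this is usually handed to Flask; the module name `settings` and the app wiring are assumptions:

from flask import Flask

app = Flask(__name__)
app.config.from_object('settings')  # assumes this file is importable as `settings`
app.logger.setLevel(app.config['LOG_LEVEL'])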
Example #9
from environs import Env

_env = Env()
_env.read_env()

LOG_LEVEL = _env.log_level("LOG_LEVEL", default="INFO")

REDIS_HOST = _env.str("REDIS_HOST", default="localhost")
REDIS_PORT = _env.int("REDIS_PORT", default=6379)
REDIS_PASSWORD = _env.str("REDIS_PASSWORD", default=None)

TEST_IN_MEMORY_LINKS_REPOSITORY = _env.bool("TEST_IN_MEMORY_LINKS_REPOSITORY",
                                            default=True)
Example #10
# export VAR_JSON='{"name": "germey", "age": 25}'
# export VAR_URL=https://cuiqingcai.com
# export VAR_UUID=762c8d53-5860-4d5d-81bc-210bf2663d0e
# export VAR_PATH=/var/py/env

from environs import Env
env = Env()
# required variables
gh_user = env("GITHUB_USER")  # => 'sloria'
secret = env("SECRET")  # => raises error if not set

# casting
max_connections = env.int("MAX_CONNECTIONS")  # => 100
ship_date = env.date("SHIP_DATE")  # => datetime.date(1984, 6, 25)
ttl = env.timedelta("TTL")  # => datetime.timedelta(0, 42)
log_level = env.log_level("LOG_LEVEL")  # => logging.DEBUG

# providing a default value
enable_login = env.bool("ENABLE_LOGIN", False)  # => True
enable_feature_x = env.bool("ENABLE_FEATURE_X", False)  # => False

# parsing lists
gh_repos = env.list("GITHUB_REPOS")  # => ['webargs', 'konch', 'ped']
coords = env.list("COORDINATES", subcast=float)  # => [23.3, 50.0]
"""
将环境变量定义到文件中,使用environs进行读取和加载,默认会读取本地当前运行目录下的 .env 文件
"""
from environs import Env
env = Env()
# env.read_env()  # read .env file, if it exists
env.read_env(path='.env.test')  # read a custom env file
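The VAR_JSON, VAR_URL, VAR_UUID and VAR_PATH exports listed at the top of this example are never read in the snippet; for completeness, a short sketch of the matching environs casts:

var_json = env.json("VAR_JSON")  # => {'name': 'germey', 'age': 25}
var_url = env.url("VAR_URL")     # => ParseResult(scheme='https', netloc='cuiqingcai.com', ...)
var_uuid = env.uuid("VAR_UUID")  # => UUID('762c8d53-5860-4d5d-81bc-210bf2663d0e')
var_path = env.path("VAR_PATH")  # => PosixPath('/var/py/env')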
Example #11
########################### ENVIRONMENT VARIABLES ############################

# General settings

# Django would like you to refer to this document before deploy:
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# Enable Django debug: leave this off in production
DEBUG = env.bool("DEBUG", False)

# The encryption key! Please keep this secret!
SECRET_KEY = env("SECRET_KEY", INVALID_SECRET_KEY)

# How noisy the console logs should be.
LOG_LEVEL = env.log_level("LOG_LEVEL", "WARN")

# Which domains can access this site?
ALLOWED_HOSTS: List[str] = env.list("ALLOWED_HOSTS", ["localhost"])

############################## ENV: DIRECTORIES ##############################

# Where persistent data will be placed
DATA_DIR = env.path("DATA_DIR", os.fspath(BASE_PATH / "run"))

# The Google Sheets ID of the vocabulary list.
# Obtain this ID
ONESPOT_GOOGLE_SHEETS_ID = env("ONESPOT_GOOGLE_SHEETS_ID", None)

############################### ENV: DATABASES ###############################
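The excerpt ends at the databases header. A minimal sketch of how this section often continues with environs' Django helper (requires the `environs[django]` extra; the default URL is an assumption):

DATABASES = {
    # dj_db_url parses a 12-factor DATABASE_URL string into Django's dict format
    "default": env.dj_db_url("DATABASE_URL", default="sqlite:///db.sqlite3"),
}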
Example #12
    API_DEBUG = env.bool("DEBUG", False)
    API_VERSION = env("VERSION")

with env.prefixed("DB_"):
    DB_HOST = env("HOST")
    DB_PORT = env.int("PORT")
    DB_NAME = env("NAME")
    DB_USERNAME = env("USERNAME")
    DB_PASSWORD = env("PASSWORD")

DB_URI = DSN_TEMPLATE.format(username=DB_USERNAME,
                             password=DB_PASSWORD,
                             host=DB_HOST,
                             port=DB_PORT,
                             db_name=DB_NAME)

with env.prefixed("LOGGER_"):
    LOGGER_NAME = env("NAME", "ReadingTracker")
    LOGGER_LEVEL = env.log_level("LEVEL", 'debug')

with env.prefixed("PER_DAY_"):
    PAGES_PER_DAY = env.int('PER_DAY_PAGES', 50)
    # max count of cards repeated per day
    _MAX_PER_DAY = env.int('PER_DAY_CARDS', 25)

with env.prefixed("DRIVE_"):
    DRIVE_TOKEN_PATH = env.path("TOKEN_PATH", "data/token.json")
    DRIVE_CREDS_PATH = env.path("CREDS_PATH", "data/creds.json")

os.environ.clear()
Example #13
    def __init__(self, data_dir, log_level, log_to_file):

        env = Env()
        env.read_env()

        try:
            self.data_dir = env.str("PROGBOTT_DATA_DIR")
        except EnvError:
            self.data_dir = data_dir

        env.read_env(self.data_dir + "/.env")

        try:
            self.log_level = env.log_level("PROGBOTT_LOG_LEVEL")
        except EnvError:
            self.log_level = log_level

        try:
            self.log_to_file = env.str("PROGBOTT_LOG_FILE")
        except EnvError:
            self.log_to_file = log_to_file

        setting_path = self.data_dir + "/settings.json"

        extra = {"github": {}}
        try:
            with codecs.open(setting_path, "r", encoding="utf8") as f:
                fil = json.load(f)
                self.token = fil.get("token")
                self.prefix = fil.get("prefixes")

                extra = {**extra, **fil.get("extra")}
        except FileNotFoundError:
            pass

        try:
            with env.prefixed("PROGBOTT_"):
                self.token = env.str("TOKEN")
                self.prefix = env.list("PREFIXES")

                with env.prefixed("EXTRA_GITHUB_"):
                    extra["github"]["secret"] = env.str("SECRET")
                    extra["github"]["client_id"] = env.str("CLIENTID")
                    extra["github"]["callback_url"] = env.str("CALLBACKURL",
                                                              validate=URL())

        except EnvError:
            pass

        if not self.prefix:
            self.prefix = "^"

        # Fail fast if no usable token was found in settings.json or the environment
        if not isinstance(getattr(self, "token", None), str):
            raise NoToken("No token found")

        try:
            self.extra = namedtuple("settings", extra.keys())(*extra.values())
        except KeyError:
            pass
Example #14
"""

from pathlib import Path
from tempfile import gettempdir

# NOTE: Flask's global config object ignores settings whose names are not ALL CAPS
from environs import Env

env = Env()
env.read_env()

## API Key to pass in header X-API-KEY
AUTH_KEY = env.str("AUTH_KEY")

## Tape library names: if multiple are available, list them to load balance across them.
## This does assume that all files are available on both libraries.

LTFSEE_LIB = env.list("LTFSEE_LIB", default=[])

## Sets the log level: ERROR, WARNING, INFO, or DEBUG.
## This does not set the logging level for core libraries or gunicorn; look in logging.cfg.
LOGLEVEL = env.log_level("LOGLEVEL", default="WARNING")

# FLASK Settings
DEBUG = env.bool("FLASK_DEBUG", default=False)
CACHE_TYPE = "filesystem"
CACHE_DEFAULT_TIMEOUT = env.int("CACHE_DEFAULT_TIMEOUT",
                                default=300)  # seconds to cache
CACHE_DIR = "/tmp/ltfsee_globus"
CACHE_OPTIONS = {"mode": 0o400}  # 3 digit linux-style permissions octal mode
Example #15
from decimal import Decimal
from environs import Env
from binance_client import constants

env = Env()
env.read_env('.env')

DEBUG = env.bool('DEBUG', True)
LOG_LEVEL = env.log_level("LOG_LEVEL")

TIMEZONE = 0

# ------------------------------ BINANCE --------------------------------
BINANCE_API_KEY = env.str('BINANCE_API_KEY')

BINANCE_API_SECRET = env.str('BINANCE_API_SECRET')

APPLICATION_NAME = 'Venom'
BASE_CURRENCY = "USD"
EQUITY = Decimal("1000.00")

OANDA_DOMAIN = env.str('OANDA_DOMAIN', 'DEMO')
OANDA_ACCESS_TOKEN = env.str('OANDA_ACCESS_TOKEN', None)
OANDA_ACCOUNT_ID = env.str('OANDA_ACCOUNT_ID', None)

CSV_DATA_DIR = env.str('QSFOREX_CSV_DATA_DIR', None)
OUTPUT_RESULTS_DIR = env.str('QSFOREX_OUTPUT_RESULTS_DIR', None)

TELSTRA_CLIENT_KEY = env.str('TELSTRA_CLIENT_KEY', '')
TELSTRA_CLIENT_SECRET = env.str('TELSTRA_CLIENT_SECRET', '')
ADMIN_MOBILE_NUMBER = env.str('ADMIN_MOBILE_NUMBER', '')
Example #16
"""Settings module
"""

from environs import Env

env = Env()
env.read_env()

UNIT_TESTS = env.bool('UNIT_TESTS', False)

DEBUG = env.bool('DEBUG', default=False)

LOG_LEVEL = env.log_level('LOG_LEVEL', 'INFO')

BROKER_REDIS_URL = env('BROKER_REDIS_URL', 'redis://127.0.0.1:6379/0')
RESULT_REDIS_URL = env('RESULT_REDIS_URL', 'redis://127.0.0.1:6379/1')
RATE_LIMITS_REDIS_URL = env(
    'RATE_LIMITS_REDIS_URL',
    'redis://127.0.0.1:6379/2',
)

SNAKE_API_ADDRESS = env('SNAKE_API_ADDRESS', 'http://localhost:8080/api')
SNAKE_CLIENT_URL = env('SNAKE_CLIENT_URL', 'http://localhost:8080/client/')
CLIENT_NAME = env('CLIENT_NAME', 'SnakeCLIClient')

TASK_INTERVAL_SCREENSHOT = env.int('TASK_INTERVAL_SCREENSHOT', 60)
TASK_INTERVAL_DELETE_CACHE = env.int('TASK_INTERVAL_DELETE_CACHE', 3600)

# Prometheus

PROMETHEUS_METRICS_LISTEN_HOST = env(