def parse_config() -> DotDict:
    """Parse configuration parameters from environment variables.

    Performs type validation on every value.

    Returns:
        DotDict: the validated configuration.

    Raises:
        environs.EnvValidationError: if parsed data does not conform to the
            expected type.
    """
    env = Env()
    env.read_env()
    config = {
        # env.url() returns obj of type urllib.parse.ParseResult
        "page_url": env.url("PAGEMON_URL").geturl(),
        "ping_interval": env.float("PAGEMON_PING_INTERVAL", 10),
        # Number of seconds to wait to establish a connection to a remote machine.
        # It's a good practice to set connect timeouts to slightly larger than a
        # multiple of 3, which is the default TCP packet retransmission window
        # (https://www.hjp.at/doc/rfc/rfc2988.txt)
        "conn_timeout": env.float("PAGEMON_CONNECT_TIMEOUT", 4),
        # Number of seconds the client will wait for the server to send a response.
        # In 99.9% of cases, this is the time before the server sends the first byte.
        "read_timeout": env.float("PAGEMON_READ_TIMEOUT", 3),
        # Number of retries for exponential backoff
        "backoff_retries": env.int("PAGEMON_BACKOFF_RETRIES", 10),
        # Kafka related configuration.
        # BUGFIX: the default used to be "localhost:9092," — the trailing comma
        # produced an empty entry in the comma-separated broker list.
        "kafka_broker_list": env.str("PAGEMON_BROKER_LIST", "localhost:9092"),
        "kafka_topic": env.str("PAGEMON_KAFKA_TOPIC", "pagemonitor_metrics"),
        # How many times to retry sending a failing Message
        "producer_retries": env.int("PAGEMON_PRODUCER_RETRIES", 3),
        # Authentication mode
        "kafka_enable_cert_auth": env.bool("PAGEMON_ENABLE_CERT_AUTH", False),
        # Only used when cert authentication mode is enabled
        "kafka_ssl_ca": env.path("PAGEMON_SSL_CA", "/etc/pagemon/ssl/ca.pem"),
        "kafka_ssl_cert": env.path("PAGEMON_SSL_CERT", "/etc/pagemon/ssl/service.cert"),
        "kafka_ssl_key": env.path("PAGEMON_SSL_KEY", "/etc/pagemon/ssl/service.key"),
    }
    return DotDict(config)
def _load_ghe_options(env: Env):
    """Resolve GitHub / GitHub Enterprise endpoint settings from GHE_* env vars.

    Sets the module-level globals GHE_HOST, GHE_PROTO, GHE_API_URL and
    GHE_API_SPEC. When GHE_API_SPEC is "api.github.com" the public GitHub
    endpoints are hard-coded; otherwise protocol/host/API URL are read from
    the environment.
    """
    global GHE_HOST, GHE_PROTO, GHE_API_URL, GHE_API_SPEC
    # Every lookup inside this context is transparently prefixed with "GHE_".
    with env.prefixed("GHE_"):
        GHE_API_SPEC = env.str(
            "API_SPEC",
            "api.github.com",
            validate=OneOf(specifications.keys(), error="GHE_API_SPEC must be one of: {choices}"),
        )
        if GHE_API_SPEC == "api.github.com":
            GHE_PROTO = "https"
            GHE_HOST = "github.com"
            GHE_API_URL = urlparse("https://api.github.com")
        else:
            GHE_PROTO = env.str(
                "PROTO",
                "https",
                validate=OneOf(["http", "https"], error="GHE_PROTO must be one of: {choices}"),
            )
            GHE_HOST = env.str("HOST")
            # BUGFIX: the lookup key must not repeat the "GHE_" prefix here —
            # inside env.prefixed("GHE_"), "GHE_API_URL" resolved to the
            # variable GHE_GHE_API_URL, so a user-supplied GHE_API_URL was
            # silently ignored and the default was always used.
            GHE_API_URL = env.url("API_URL", f"{GHE_PROTO}://{GHE_HOST}/api/v3")
import logging

from environs import Env

env = Env()

# Validate env vars up front: raises if DATABRICKS_HOST is missing or is not
# a well-formed URL.
env.url('DATABRICKS_HOST')

# Settings for talking to the MLflow tracking server hosted on Databricks.
mlflow_config = {
    'api_prefix': env("MLFLOW_SERVER_API_PREFIX"),
    'search_path': env("MLFLOW_SERVER_SEARCH_PATH"),
    # Parsed as float, truncated to int, then stringified — presumably to
    # tolerate values like "123.0"; TODO confirm why plain env.int is not used.
    'mlflow_experiment_id': str(int(env.float("MLFLOW_EXPERIMENT_ID"))),
    'databricks_token': env('DATABRICKS_TOKEN'),
    'databricks_host': env('DATABRICKS_HOST'),
    'models_relative_path': env('MODELS_RELATIVE_PATH'),
    'model_name': env('MODEL_NAME'),
    # Timeout for MLflow requests (presumably seconds — confirm at call site).
    "timeout": env.int("MLFLOW_REQUESTS_TIMEOUT")
}

# Generic service/runtime settings.
service_config = {
    "appinsights_key": env("APP_INSIGHTS_INSTRUMENTATION_KEY"),
    "service_name": env("SERVICE_NAME"),
    # Numeric logging level; defaults to logging.INFO (20).
    "log_level": env.int("LOG_LEVEL", logging.INFO),
    "port": env.int("PORT", 80)
}
# Internationalization / time-zone settings.
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = "/static/"

# Celery configuration.
# env.url() validates the broker URL (amqp scheme only; require_tld=False
# permits bare hostnames/IPs such as 127.0.0.1) and returns a ParseResult,
# so .geturl() converts it back to the string Celery expects.
CELERY_BROKER_URL = env.url(
    "CELERY_BROKER_URL",
    default="amqp://*****:*****@127.0.0.1:5672",
    schemes={"amqp"},
    require_tld=False,
).geturl()
CELERY_RESULT_BACKEND = "django-db"
CELERY_CACHE_BACKEND = "django-cache"
# Route each task to its own dedicated queue.
CELERY_TASK_ROUTES = {
    "parking.tasks.finish_parking": {
        "queue": "finish_parking"
    },
    "parking.tasks.parking_report": {
        "queue": "parking_report"
    },
}
CELERY_APP = "celery_demo"
CELERY_TRACK_STARTED = True
CELERY_TASK_COMPRESSION = "bzip2"
from environs import Env

from invite0 import data

env = Env()
env.read_env()

SERVER_NAME = env.str('INVITE0_DOMAIN')
ORG_NAME = env.str('ORG_NAME')
USER_FIELDS = env.list('USER_FIELDS', default=['picture', 'nickname', 'given_name', 'family_name'])
INVITE_EXPIRATION_DAYS = env.decimal('INVITE_EXPIRATION_DAYS', default=5)
INVITE_PERMISSION = env.str('INVITE_PERMISSION', default='send:invitation')

# BUGFIX: env.url() returns None when WELCOME_URL is unset (default=None);
# calling .geturl() unconditionally raised AttributeError in that case.
_welcome_url = env.url('WELCOME_URL', default=None)
WELCOME_URL = _welcome_url.geturl() if _welcome_url is not None else None

SECRET_KEY = env.str('SECRET_KEY')

# Outgoing-mail settings.
MAIL_SERVER = env.str('MAIL_SERVER')
MAIL_PORT = env.str('MAIL_PORT')
MAIL_USE_TLS = env.bool('MAIL_USE_TLS', default=False)
MAIL_USE_SSL = env.bool('MAIL_USE_SSL', default=False)
MAIL_USERNAME = env.str('MAIL_USERNAME')
MAIL_PASSWORD = env.str('MAIL_PASSWORD')
MAIL_SENDER_NAME = env.str('MAIL_SENDER_NAME', default=None)
MAIL_SENDER_ADDRESS = env.str('MAIL_SENDER_ADDRESS')
MAIL_MAX_EMAILS = env.int('MAIL_MAX_EMAILS', default=None)

# Auth0 credentials.
AUTH0_CLIENT_ID = env.str('AUTH0_CLIENT_ID')
AUTH0_CLIENT_SECRET = env.str('AUTH0_CLIENT_SECRET')
AUTH0_AUDIENCE = env.str('AUTH0_AUDIENCE')
AUTH0_DOMAIN = env.str('AUTH0_DOMAIN')
# Django logging configuration: ship django-logger records at INFO and above
# to a remote syslog daemon.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "simple": {
            "()": "django.utils.log.ServerFormatter",
            "format": "[{server_time}] ({levelname}): {message}",
            "style": "{",
        },
    },
    "handlers": {
        "SysLog": {
            "level": "INFO",
            "formatter": "simple",
            "class": "logging.handlers.SysLogHandler",
            # BUGFIX: SysLogHandler expects address=(host, port) where host is
            # a str, but env.url() returns a urllib.parse.ParseResult object;
            # use .hostname to extract the bare host name.
            # NOTE(review): assumes SYSLOG_URL carries a scheme (env.url's
            # validator requires a well-formed URL) — confirm deploy config.
            "address": (env.url("SYSLOG_URL").hostname, env.int("SYSLOG_PORT")),
        },
    },
    "loggers": {
        "django": {
            "handlers": ["SysLog"],
            "level": "INFO",
        },
    },
}

# Static file and media file settings
STATIC_LOCATION = "static"
MEDIA_LOCATION = "media"
STATIC_URL = "/static/"
MEDIA_URL = "/media/"
def __init__(self, envFile=None):
    """Load APEL/KAPEL publisher configuration from environment variables.

    Args:
        envFile: optional path to a .env file; when None, only process
            environment variables are used.
    """
    env = Env()
    # Read a .env file if one is specified, otherwise only environment variables will be used.
    env.read_env(envFile, recurse=False, verbose=True)

    # URL of the Prometheus server. Default value works within cluster for
    # default bitnami/kube-prometheus Helm release.
    # Format: validity is determined by python urllib.parse.
    self.prometheus_server = env.url(
        "PROMETHEUS_SERVER",
        "http://kube-prometheus-prometheus.kube-prometheus:9090").geturl()

    # The default behaviour ("auto" mode) is to publish records for the
    # previous month, and up to the current day of the current month.
    self.publishing_mode = env.str("PUBLISHING_MODE", "auto")

    # If PUBLISH_MODE is "gap" instead, then a fixed time period will be
    # queried instead and we need the start and end to be specified.
    # Format: ISO 8601, like "2020-12-20T07:20:50.52Z", to avoid complications
    # with time zones and leap seconds. Timezone should be specified, and it
    # should be UTC for consistency with the auto mode publishing.
    # IMPORTANT NOTE: since only APEL summary records are supported (not
    # individual job records), if you specify QUERY_START as a time that is
    # NOT precisely the beginning of a month, a partial month summary record
    # will be produced and published. The APEL server may ignore it if it
    # already has a summary record for that month containing more jobs.
    # Therefore when using gap mode make sure that QUERY_START is precisely
    # the beginning of a month in order to produce a complete summary record
    # for that month which will take precedence over any other records
    # containing fewer jobs that may have already been published. The same
    # applies for QUERY_END matching the end of the month (unless it is the
    # current month at the time of publishing, in which case a subsequent run
    # in auto mode will eventually complete the records for this month).
    # So QUERY_START (and possibly QUERY_END) should look like e.g.
    # '2021-02-01T00:00:00+00:00'
    if self.publishing_mode == "gap":
        self.query_start = env.datetime("QUERY_START")
        self.query_end = env.datetime("QUERY_END")
    else:
        # set a defined but invalid value to simplify time period functions
        self.query_start = None
        self.query_end = None

    # Timeout for the server to evaluate the query. Can take awhile for
    # large-scale production use.
    # Format: https://prometheus.io/docs/prometheus/latest/querying/basics/#time-durations
    self.query_timeout = env.str("QUERY_TIMEOUT", "1800s")

    # Where to write the APEL message output.
    self.output_path = env.path("OUTPUT_PATH", "/srv/kapel")

    ## Info for APEL records, see https://wiki.egi.eu/wiki/APEL/MessageFormat
    # GOCDB site name
    self.site_name = env.str("SITE_NAME")
    # uniquely identifying name of cluster (like CE ID) host_name:port/namespace
    self.submit_host = env.str("SUBMIT_HOST")
    # Benchmark type (HEPSPEC by default)
    #self.benchmark_type = env.str("BENCHMARK_TYPE", "HEPSPEC")
    # Benchmark value
    self.benchmark_value = env.float("BENCHMARK_VALUE")
    # VO of jobs
    self.vo_name = env.str("VO_NAME")
    # infrastructure info
    self.infrastructure_type = env.str("INFRASTRUCTURE_TYPE", "grid")
    self.infrastructure_description = env.str("INFRASTRUCTURE_DESCRIPTION", "APEL-KUBERNETES")
    # optionally define number of nodes and processors. Should not be necessary to
    # set a default of 0 here but see https://github.com/apel/apel/issues/241
    self.nodecount = env.int("NODECOUNT", 0)
    self.processors = env.int("PROCESSORS", 0)
def _load_other_options(env: Env):
    """Read the remaining, uncategorised settings from the environment.

    Populates the module-level WEBHOOK_PROXY_URL global; the value is None
    when the WEBHOOK_PROXY_URL variable is unset.
    """
    global WEBHOOK_PROXY_URL
    WEBHOOK_PROXY_URL = env.url("WEBHOOK_PROXY_URL", default=None)
from environs import Env

from invite0 import data

env = Env()
env.read_env()

SERVER_NAME = env.str('INVITE0_DOMAIN')
ORG_NAME = env.str('ORG_NAME')

# BUGFIX: env.url() returns None when the variable is unset (default=None);
# calling .geturl() unconditionally raised AttributeError in that case.
_org_logo = env.url('ORG_LOGO', default=None)
ORG_LOGO = _org_logo.geturl() if _org_logo is not None else None

USER_FIELDS = env.list(
    'USER_FIELDS', default=['picture', 'nickname', 'given_name', 'family_name'])
REQUIRED_USER_FIELDS = env.list('REQUIRED_USER_FIELDS', default=[])
INVITE_EXPIRATION_DAYS = env.decimal('INVITE_EXPIRATION_DAYS', default=5)
INVITE_SUBJECT = env.str('INVITE_SUBJECT', default=f'{ORG_NAME} | Sign Up')
INVITE_PERMISSION = env.str('INVITE_PERMISSION', default='send:invitation')

# Same None guard as ORG_LOGO: WELCOME_URL is optional.
_welcome_url = env.url('WELCOME_URL', default=None)
WELCOME_URL = _welcome_url.geturl() if _welcome_url is not None else None

SECRET_KEY = env.str('SECRET_KEY')

# Outgoing-mail settings.
MAIL_SERVER = env.str('MAIL_SERVER')
MAIL_PORT = env.str('MAIL_PORT')
MAIL_USE_TLS = env.bool('MAIL_USE_TLS', default=False)
MAIL_USE_SSL = env.bool('MAIL_USE_SSL', default=False)
MAIL_USERNAME = env.str('MAIL_USERNAME')
MAIL_PASSWORD = env.str('MAIL_PASSWORD')
MAIL_SENDER_NAME = env.str('MAIL_SENDER_NAME', default=None)
MAIL_SENDER_ADDRESS = env.str('MAIL_SENDER_ADDRESS')
MAIL_MAX_EMAILS = env.int('MAIL_MAX_EMAILS', default=None)

# Auth0 credentials.
AUTH0_CLIENT_ID = env.str('AUTH0_CLIENT_ID')