def parse_config() -> DotDict:
    """
    Parse configuration parameters from environment variables.
    Performs type validation.

    Raises:
        environs.EnvValidationError: if parsed data does not conform to the expected type.
    """
    env = Env()
    env.read_env()
    config = {
        # env.url() returns an object of type urllib.parse.ParseResult
        "page_url": env.url("PAGEMON_URL").geturl(),
        "ping_interval": env.float("PAGEMON_PING_INTERVAL", 10),
        # Number of seconds to wait to establish a connection to a remote machine.
        # It's good practice to set connect timeouts slightly larger than a
        # multiple of 3, which is the default TCP packet retransmission window
        # (https://www.hjp.at/doc/rfc/rfc2988.txt)
        "conn_timeout": env.float("PAGEMON_CONNECT_TIMEOUT", 4),
        # Number of seconds the client will wait for the server to send a response.
        # In 99.9% of cases, this is the time before the server sends the first byte.
        "read_timeout": env.float("PAGEMON_READ_TIMEOUT", 3),
        # Number of retries for exponential backoff
        "backoff_retries": env.int("PAGEMON_BACKOFF_RETRIES", 10),
        # Kafka-related configuration
        "kafka_broker_list": env.str("PAGEMON_BROKER_LIST", "localhost:9092,"),
        "kafka_topic": env.str("PAGEMON_KAFKA_TOPIC", "pagemonitor_metrics"),
        # How many times to retry sending a failing message
        "producer_retries": env.int("PAGEMON_PRODUCER_RETRIES", 3),
        # Authentication mode
        "kafka_enable_cert_auth": env.bool("PAGEMON_ENABLE_CERT_AUTH", False),
        # Used only when certificate authentication is enabled
        "kafka_ssl_ca": env.path("PAGEMON_SSL_CA", "/etc/pagemon/ssl/ca.pem"),
        "kafka_ssl_cert": env.path("PAGEMON_SSL_CERT", "/etc/pagemon/ssl/service.cert"),
        "kafka_ssl_key": env.path("PAGEMON_SSL_KEY", "/etc/pagemon/ssl/service.key"),
    }
    return DotDict(config)
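A minimal usage sketch (hypothetical URL; DotDict is assumed to behave like a plain dict): with only PAGEMON_URL set, every other setting falls back to its default.

import os

os.environ["PAGEMON_URL"] = "https://example.com/health"  # hypothetical value

config = parse_config()
print(config["page_url"])       # "https://example.com/health"
print(config["ping_interval"])  # 10.0 -- default, parsed as a float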
def get_output_path() -> Path:
    env = Env()
    env.read_env()
    output_path = env.path('OUTPUT_PATH', './output/')
    if not output_path.is_absolute():
        output_path = get_root_path().joinpath(output_path)
    return Path(os.path.normcase(output_path)).resolve()
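A sketch of the resolution behaviour, assuming get_root_path() returns the project root (values hypothetical):

import os

os.environ["OUTPUT_PATH"] = "artifacts"         # relative: joined to the project root
print(get_output_path())                        # e.g. /srv/project/artifacts

os.environ["OUTPUT_PATH"] = "/var/data/output"  # absolute: used as-is
print(get_output_path())                        # /var/data/output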
async def main():
    """Main function."""
    env = Env()
    env.read_env()
    setup_logger(env.int("LOG_LEVEL", logging.INFO), env.path("LOG_FILE", None))
    async with aiohttp.ClientSession() as session:
        bot = create_bot(env, session)
        LOGGER.debug("Starting bot")
        await bot.start(env.str("BOT_TOKEN"))
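The snippet does not show how the coroutine is launched; a typical entry point (assuming the module is run directly) would be:

import asyncio

if __name__ == "__main__":
    asyncio.run(main())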
# ---------------------------------------------------
verNum = '1125'
version = '1.0.2.11162350.11'
"""********************************* Server backend configuration *********************************"""
# ---------------------------------------------------
# Server project directory, Linux-based
# RootDir: /qinse/V2RaycSpider{verNum}
# ---------------------------------------------------
# Chromedriver path; cloud servers running macOS are not supported yet
# Linux Google Chrome v85.0.4183.102
if 'win' in platform:
    CHROMEDRIVER_PATH = dirname(
        __file__) + '/BusinessCentralLayer/chromedriver.exe'
    SERVER_DIR_PROJECT = env.path("SERVER_DIR_PROJECT", dirname(__file__))
else:
    CHROMEDRIVER_PATH = dirname(
        __file__) + '/BusinessCentralLayer/chromedriver'
    SERVER_DIR_PROJECT = env.path('SERVER_DIR_PROJECT',
                                  f'/qinse/V2RaycSpider{verNum}')
# File-based database path
SERVER_DIR_DATABASE = env.path('SERVER_DIR_DATABASE',
                               join(SERVER_DIR_PROJECT, 'Database'))
# Repository of historical clients
SERVER_DIR_CLIENT_DEPORT = env.path('SERVER_DIR_CLIENT_DEPORT',
                                    join(SERVER_DIR_DATABASE, 'client_depot'))
# Version management file
def __init__(self, envFile=None):
    env = Env()
    # Read a .env file if one is specified; otherwise only environment variables will be used.
    env.read_env(envFile, recurse=False, verbose=True)

    # URL of the Prometheus server. The default value works within the cluster for a
    # default bitnami/kube-prometheus Helm release.
    # Format: validity is determined by python urllib.parse.
    self.prometheus_server = env.url(
        "PROMETHEUS_SERVER",
        "http://kube-prometheus-prometheus.kube-prometheus:9090").geturl()

    # The default behaviour ("auto" mode) is to publish records for the previous month,
    # and up to the current day of the current month.
    self.publishing_mode = env.str("PUBLISHING_MODE", "auto")

    # If PUBLISHING_MODE is "gap" instead, a fixed time period is queried, and the start
    # and end must be specified.
    # Format: ISO 8601, like "2020-12-20T07:20:50.52Z", to avoid complications with time
    # zones and leap seconds. The timezone should be specified, and it should be UTC for
    # consistency with auto-mode publishing.
    # IMPORTANT NOTE: since only APEL summary records are supported (not individual job
    # records), if QUERY_START is NOT precisely the beginning of a month, a partial month
    # summary record will be produced and published. The APEL server may ignore it if it
    # already has a summary record for that month containing more jobs. Therefore, when
    # using gap mode, make sure that QUERY_START is precisely the beginning of a month, so
    # that a complete summary record is produced for that month which will take precedence
    # over any previously published records containing fewer jobs. The same applies to
    # QUERY_END matching the end of the month (unless it is the current month at the time
    # of publishing, in which case a subsequent run in auto mode will eventually complete
    # the records for this month). So QUERY_START (and possibly QUERY_END) should look
    # like e.g. '2021-02-01T00:00:00+00:00'.
    if self.publishing_mode == "gap":
        self.query_start = env.datetime("QUERY_START")
        self.query_end = env.datetime("QUERY_END")
    else:
        # set a defined but invalid value to simplify time period functions
        self.query_start = None
        self.query_end = None

    # Timeout for the server to evaluate the query. Can take a while for large-scale
    # production use.
    # Format: https://prometheus.io/docs/prometheus/latest/querying/basics/#time-durations
    self.query_timeout = env.str("QUERY_TIMEOUT", "1800s")

    # Where to write the APEL message output.
    self.output_path = env.path("OUTPUT_PATH", "/srv/kapel")

    ## Info for APEL records, see https://wiki.egi.eu/wiki/APEL/MessageFormat
    # GOCDB site name
    self.site_name = env.str("SITE_NAME")
    # Uniquely identifying name of the cluster (like a CE ID): host_name:port/namespace
    self.submit_host = env.str("SUBMIT_HOST")
    # Benchmark type (HEPSPEC by default)
    #self.benchmark_type = env.str("BENCHMARK_TYPE", "HEPSPEC")
    # Benchmark value
    self.benchmark_value = env.float("BENCHMARK_VALUE")
    # VO of jobs
    self.vo_name = env.str("VO_NAME")
    # Infrastructure info
    self.infrastructure_type = env.str("INFRASTRUCTURE_TYPE", "grid")
    self.infrastructure_description = env.str("INFRASTRUCTURE_DESCRIPTION", "APEL-KUBERNETES")
    # Optionally define the number of nodes and processors. It should not be necessary to
    # set a default of 0 here, but see https://github.com/apel/apel/issues/241
    self.nodecount = env.int("NODECOUNT", 0)
    self.processors = env.int("PROCESSORS", 0)
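A hedged sketch of gap-mode configuration, assuming the enclosing class is named Config (all values hypothetical): QUERY_START and QUERY_END sit on month boundaries in UTC, as the comments above require.

import os

os.environ["PUBLISHING_MODE"] = "gap"
os.environ["QUERY_START"] = "2021-02-01T00:00:00+00:00"  # start of the month
os.environ["QUERY_END"] = "2021-03-01T00:00:00+00:00"    # end of the month
# Required settings without defaults (hypothetical values):
os.environ["SITE_NAME"] = "EXAMPLE-SITE"
os.environ["SUBMIT_HOST"] = "cluster.example.org:6443/jobs"
os.environ["BENCHMARK_VALUE"] = "10.0"
os.environ["VO_NAME"] = "example.vo"

cfg = Config()
print(cfg.query_start)  # 2021-02-01 00:00:00+00:00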
import os
import pathlib

import marshmallow as ma
from environs import Env

os.environ["STATIC_PATH"] = "app/static"


class PathField(ma.fields.Field):
    def _deserialize(self, value, *args, **kwargs):
        return pathlib.Path(value)

    def _serialize(self, value, *args, **kwargs):
        return str(value)


env = Env()
env.parser_from_field("path", PathField)

static_path = env.path("STATIC_PATH")
assert isinstance(static_path, pathlib.Path)
print(env.dump())
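For reference (my reading of PathField._serialize above): env.dump() serializes each parsed value back to a string, so the final line should print the path as plain text.

print(env.dump())  # {'STATIC_PATH': 'app/static'}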
# Enable Django debug: leave this off in production
DEBUG = env.bool("DEBUG", False)

# The encryption key! Please keep this secret!
SECRET_KEY = env("SECRET_KEY", INVALID_SECRET_KEY)

# How noisy the console logs should be.
LOG_LEVEL = env.log_level("LOG_LEVEL", "WARN")

# Which domains can access this site?
ALLOWED_HOSTS: List[str] = env.list("ALLOWED_HOSTS", ["localhost"])

############################## ENV: DIRECTORIES ##############################

# Where persistent data will be placed
DATA_DIR = env.path("DATA_DIR", os.fspath(BASE_PATH / "run"))

# The Google Sheets ID of the vocabulary list.
# Obtain this ID
ONESPOT_GOOGLE_SHEETS_ID = env("ONESPOT_GOOGLE_SHEETS_ID", None)

############################### ENV: DATABASES ###############################

with env.prefixed("DATABASE_"):
    DATABASE_ENGINE = env("ENGINE", "django.db.backends.sqlite3")
    DATABASE_NAME = env("NAME", os.fspath(DATA_DIR / "db.sqlite3"))
    DATABASE_LOG_LEVEL = env.log_level("LOG_LEVEL", "WARN")

############################# ENV: STATIC FILES ##############################
# Static files (CSS, JavaScript, Images)
"django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", ], }, }, ] WSGI_APPLICATION = "simpledeck.wsgi.application" # Database # https://docs.djangoproject.com/en/3.2/ref/settings/#databases DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": env.path("SIMPLEDECK_DB_PATH", str(BASE_DIR / "db.sqlite3")), } } # Password validation # https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { "NAME": ("django.contrib.auth.password_validation.UserAttributeSimilarityValidator" ), }, { "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
RSS_POST_ROUTE_PARTIAL = "blog/post"

env = Env()


@env.parser_for("furl")
def url_parser(value: str) -> Url:
    return Url(value)


with env.prefixed("PT_"):
    address = env.str("ADDRESS", "0.0.0.0")  # nosec
    port = env.int("PORT", 5000)
    debug = env.bool("DEBUG", False)
    static_handler = env.bool("ENABLE_STATIC_HANDLER", True)
    blog_path = env.path("BLOG_PATH", "./blog")
    public_url = env.furl("PUBLIC_URL", DEFAULT_PUBLIC_URL)
    blog_static_url = env.furl("BLOG_STATIC_URL", DEFAULT_BLOG_STATIC_URL)
    fq_url = env.furl("FQ_URL", f"http://localhost:{port}")
    # Disabled by default because the rate limit can be hit relatively easily
    # during development
    enable_github = env.bool("ENABLE_GITHUB", False)
    github_user = env.str("GITHUB_USER", "PhilipTrauner")
    # Disabled by default because credentials are necessary
    enable_spotify = env.bool("ENABLE_SPOTIFY", False)
    spotify_user = env.str("SPOTIFY_USER", "philip.trauner")
    spotify_client_id = env.str("SPOTIFY_CLIENT_ID", None)
    spotify_client_secret = env.str("SPOTIFY_CLIENT_SECRET", None)

app = Sanic(NAME)
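A minimal sketch of how the PT_ prefix and the custom furl parser resolve (hypothetical values; Url and DEFAULT_PUBLIC_URL as defined elsewhere in the module):

import os

os.environ["PT_PORT"] = "8080"
os.environ["PT_PUBLIC_URL"] = "https://blog.example.org"

with env.prefixed("PT_"):
    assert env.int("PORT", 5000) == 8080              # read from PT_PORT
    url = env.furl("PUBLIC_URL", DEFAULT_PUBLIC_URL)  # parsed via url_parser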
API_DEBUG = env.bool("DEBUG", False)
API_VERSION = env("VERSION")

with env.prefixed("DB_"):
    DB_HOST = env("HOST")
    DB_PORT = env.int("PORT")
    DB_NAME = env("NAME")
    DB_USERNAME = env("USERNAME")
    DB_PASSWORD = env("PASSWORD")

DB_URI = DSN_TEMPLATE.format(
    username=DB_USERNAME,
    password=DB_PASSWORD,
    host=DB_HOST,
    port=DB_PORT,
    db_name=DB_NAME
)

with env.prefixed("LOGGER_"):
    LOGGER_NAME = env("NAME", "ReadingTracker")
    LOGGER_LEVEL = env.log_level("LEVEL", 'debug')

with env.prefixed("PER_DAY_"):
    # Names inside env.prefixed() omit the prefix, so these read
    # PER_DAY_PAGES and PER_DAY_CARDS.
    PAGES_PER_DAY = env.int('PAGES', 50)
    # max count of cards repeated per day
    _MAX_PER_DAY = env.int('CARDS', 25)

with env.prefixed("DRIVE_"):
    DRIVE_TOKEN_PATH = env.path("TOKEN_PATH", "data/token.json")
    DRIVE_CREDS_PATH = env.path("CREDS_PATH", "data/creds.json")

os.environ.clear()
LANGUAGE_CODE = "ru-ru"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = "/static/"
STATIC_ROOT = env.path("STATIC_ROOT", default=os.path.join(BASE_DIR, 'staticfiles'))
STATICFILES_DIRS = env.list(
    "STATICFILES_DIRS",
    default=[os.path.join(BASE_DIR, 'static')],
)

# User uploaded content settings
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "media")

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
import logging
import os
import sys
from typing import List

from environs import Env

env = Env()
env.read_env()

BASE_MESSAGE_FORMAT = "[%(asctime)s] [%(name)s:%(levelname)s] " \
                      "[%(module)s:%(funcName)s():%(process)d]"
DATE_FORMAT = "%d-%m-%Y %H:%M:%S"

LOG_FOLDER = env.path('LOG_FOLDER')
DEBUG = env.bool('DEBUG')

try:
    os.makedirs(LOG_FOLDER, exist_ok=True)
except PermissionError:
    # Proceed without a log folder if we lack permission to create it.
    pass


class LevelFilter(logging.Filter):
    def __init__(self, levels: List[int] = None) -> None:
        self._levels = levels or []
        super().__init__(self.__class__.__name__)

    def filter(self,
from environs import Env

env = Env()
env.read_env()

TOKEN = env.str("TOKEN")
PSNR_THRESHOLD = env.int("PSNR_THRESHOLD")

IMAGE_DIR = env.path("IMAGE_DIR")
IMAGE_DIR.mkdir(exist_ok=True, parents=True)

DB_CONNECTION = env.str("DB_CONNECTION")
DB_ECHO = env.bool("DB_ECHO", False)
import sys

import sentry_sdk
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from tqdm import tqdm

import peewee
from bs4 import BeautifulSoup
from bs4.element import Tag
from peewee import Model, IntegerField, CharField, ForeignKeyField
from playhouse.db_url import connect
from playhouse.shortcuts import model_to_dict, dict_to_model

from environs import Env

env = Env()
env.read_env('dev.env')

db = connect(env('DATABASE_URL'))
data_dir = env.path('SEAFARER_DATA_DIR')

sentry_sdk.init(
    env('SENTRY_TOKEN'),
    integrations=[AioHttpIntegration()]
)


class BaseModel(Model):
    DATE_FORMAT = '%d.%m.%Y'

    class Meta:
        database = db


class Department(BaseModel):
    name = CharField()

    def __str__(self):
def parse_config() -> DotDict:
    """
    Parse configuration parameters from environment variables.
    Performs type validation.

    Raises:
        environs.EnvValidationError: if parsed data does not conform to the expected type.
    """
    env = Env()
    env.read_env()
    config = {
        # Kafka-related configuration
        #
        "kafka_broker_list": env.str("KAPG_BROKER_LIST", "localhost:9092,"),
        "kafka_topic": env.str("KAPG_KAFKA_TOPIC", "pagemonitor_metrics"),
        # Client group id string. All clients sharing the same group.id belong
        # to the same group
        "consumer_group.id": env.str("KAPG_GROUP_ID", "42"),
        # Action to take when there is no initial offset in the offset store or
        # the desired offset is out of range
        "consumer_auto.offset.reset": env.str("KAPG_AUTOOFFSETRESET", "earliest"),
        "consumer_sleep_interval": env.float("KAPG_CONSUMER_SLEEP", 2.0),
        "kafka_enable_cert_auth": env.bool("KAPG_KAFKA_ENABLE_CERT_AUTH", False),
        # Used only when certificate authentication is enabled
        "kafka_ssl_ca": env.path("KAPG_KAFKA_SSL_CA", "/etc/kapg/ssl/kafka/ca.pem"),
        "kafka_ssl_cert": env.path("KAPG_KAFKA_SSL_CERT", "/etc/kapg/ssl/kafka/service.cert"),
        "kafka_ssl_key": env.path("KAPG_KAFKA_SSL_KEY", "/etc/kapg/ssl/kafka/service.key"),
        # PostgreSQL-related configuration
        #
        "pg_host": env.str("KAPG_PG_HOST", "localhost"),
        "pg_port": env.int("KAPG_PG_PORT", 5432),
        "pg_user": env.str("KAPG_PG_USER", "postgres"),
        "pg_password": env.str("KAPG_PG_PWD", "changeme"),
        "pg_db_name": env.str("KAPG_PG_DB_NAME", "metrics"),
        "pg_table_name": env.str("KAPG_PG_TABLE_NAME", "pagemonitor"),
        "pg_conn_timeout": env.float("KAPG_PG_CONN_TIMEOUT", 10.0),
        "pg_command_timeout": env.float("KAPG_PG_COMMAND_TIMEOUT", 10.0),
        # SSL config
        "pg_enable_ssl": env.bool("KAPG_PG_ENABLE_SSL", False),
        "pg_ssl_ca": env.path("KAPG_PG_SSL_CA", "/etc/kapg/ssl/postgres/ca.pem"),
    }
    return DotDict(config)