def setup(context=None, config=None, app_factory=get_app):
    """Build a test application with test settings and reset its data stores.

    When ``context`` is given, the app and a Flask test client are attached
    to it for use by behave-style test steps.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object('superdesk.tests.test_settings')
    app_config['APP_ABSPATH'] = app_abspath
    app_config.update(get_test_settings())
    app_config.update(config or {})
    app_config.update({'DEBUG': True, 'TESTING': True})

    app = app_factory(app_config)

    # Quiet noisy libraries while the tests run.
    for noisy in ('superdesk', 'elasticsearch', 'urllib3'):
        logging.getLogger(noisy).setLevel(logging.ERROR)

    drop_elastic(app)
    drop_mongo(app)
    # create index again after dropping it
    app.data.init_elastic(app)

    if context:
        context.app = app
        context.client = app.test_client()
def test_config():
    """Check the DevelopmentConfig can be read without creating an app."""
    config = Config(".")
    print(config)
    config.from_object("cryptoadvance.specter.config.DevelopmentConfig")
    print(config)
    assert config["PORT"] == 25441
def setup_config(config):
    """Assemble a test configuration on top of the superdesk defaults.

    Returns a deep-copied plain dict so callers cannot mutate shared state.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object("superdesk.default_settings")
    update_config(app_config)
    app_config.update(
        config or {},
        APP_ABSPATH=app_abspath,
        DEBUG=True,
        TESTING=True,
    )

    # Reduce log noise during tests; superdesk errors only at CRITICAL.
    for name, level in (
        ("apps", logging.WARNING),
        ("elastic", logging.WARNING),  # elastic datalayer
        ("urllib3", logging.WARNING),
        ("celery", logging.WARNING),
        ("superdesk", logging.ERROR),
        ("elasticsearch", logging.ERROR),
        ("superdesk.errors", logging.CRITICAL),
    ):
        logging.getLogger(name).setLevel(level)

    return {key: deepcopy(val) for key, val in app_config.items()}
def get_config(
    self,
    bundle: Bundle,
    env: Union[DEV, PROD, STAGING, TEST],
) -> AttrDict:
    """Merge a bundle's base config and env-specific config into an AttrDict.

    App bundles must supply an ``AppConfig`` subclass and a ``SECRET_KEY``;
    either omission raises.
    """
    module = self.import_bundle_module(bundle)
    base = getattr(module, BASE_CONFIG, None)
    env_specific = getattr(module, ENV_CONFIGS[env], None)

    is_app_bundle = isinstance(bundle, AppBundle)
    if is_app_bundle and not (base and issubclass(base, AppConfig)):
        raise Exception("Could not find an AppConfig subclass in your app "
                        "bundle's config module.")

    merged = Config(None)
    for candidate in (base, env_specific):
        if candidate:
            merged.from_object(candidate)

    if is_app_bundle and 'SECRET_KEY' not in merged:
        raise Exception(
            "The `SECRET_KEY` config option is required. "
            "Please set it in your app bundle's base `Config` class.")

    return AttrDict(merged)
def setup_config(config):
    """Assemble a test configuration, preferring a local ``settings.py``.

    Searches from the current working directory upward for a ``settings.py``
    and layers it over the superdesk defaults; returns a deep-copied dict.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object("superdesk.default_settings")

    # Walk from the cwd up to the filesystem root looking for settings.py.
    cwd = Path.cwd()
    for candidate in [cwd, *cwd.parents]:
        settings = candidate / "settings.py"
        if settings.is_file():
            logger.info(f"using local settings from {settings}")
            app_config.from_pyfile(settings)
            break
    else:
        logger.warning("Can't find local settings")

    update_config(app_config)
    app_config.update(
        config or {},
        APP_ABSPATH=app_abspath,
        DEBUG=True,
        TESTING=True,
    )

    # Reduce log noise during tests.
    for name, level in (
        ("apps", logging.WARNING),
        ("elastic", logging.WARNING),  # elastic datalayer
        ("urllib3", logging.WARNING),
        ("celery", logging.WARNING),
        ("superdesk", logging.ERROR),
        ("elasticsearch", logging.ERROR),
        ("superdesk.errors", logging.CRITICAL),
    ):
        logging.getLogger(name).setLevel(level)

    return {key: deepcopy(val) for key, val in app_config.items()}
def __init__(self, app_name='enhanced-flask-app', config_file=None,
             js_filters=None, css_filters=None, **flask_kwargs):
    """Create and enhance a Flask application with webassets and jinja extras.

    :param app_name: name passed to the ``Flask`` constructor.
    :param config_file: optional config object/import path layered on top of
        ``Flask.default_config``.
    :param js_filters: optional list of webassets filters for JS bundles.
    :param css_filters: optional list of webassets filters for CSS bundles.
    :param flask_kwargs: forwarded verbatim to ``Flask``.
    """
    # Load the defaults first so config_file can override them.
    config = Config('.', Flask.default_config)
    if config_file:
        config.from_object(config_file)
    self.app = Flask(app_name, **flask_kwargs)
    self.app.config = config

    # Create webassets
    self.assets_env = Environment(self.app)
    self.assets_env.url_expire = True
    self.assets_env.url = '/static'

    # Initialize additional jinja stuff
    self.enhance_jinja(self.app.jinja_env)

    # Flask assets related.
    # FIX: the parameters previously used mutable defaults ([]) — shared
    # across calls — and were then ignored (attributes always reset to []).
    # Defaults are now None and supplied filters are actually honored;
    # the default behavior (empty lists) is unchanged.
    self.js_filters = list(js_filters) if js_filters else []
    self.css_filters = list(css_filters) if css_filters else []
    self.depends_scss = []
    self.enhance_assets()
def Config(envvar_silent=True) -> FlaskConfig:
    # Lazily build and cache one FlaskConfig for the whole process:
    # defaults come from ledslie.defaults and may be overridden by the
    # file named in the LEDSLIE_CONFIG environment variable.
    # `envvar_silent` only matters on the first call (subsequent calls
    # return the cached instance unchanged).
    global _Config_instance
    if _Config_instance is None:
        _Config_instance = FlaskConfig('.')
        _Config_instance.from_object('ledslie.defaults')
        _Config_instance.from_envvar('LEDSLIE_CONFIG', silent=envvar_silent)
    return _Config_instance
def app():
    """Create a NewsroomWebApp backed by the default settings, in test mode."""
    from flask import Config
    from newsroom.web import NewsroomWebApp

    app_config = Config(root)
    app_config.from_object('newsroom.default_settings')
    update_config(app_config)
    return NewsroomWebApp(config=app_config, testing=True)
def app():
    """Create a Newsroom application backed by the default settings."""
    from flask import Config
    from newsroom import Newsroom

    app_config = Config(root)
    app_config.from_object('newsroom.default_settings')
    update_config(app_config)
    return Newsroom(config=app_config)
def app():
    """Create a NewsroomNewsAPI app from the news_api settings, in test mode."""
    from flask import Config
    from newsroom.news_api.app import NewsroomNewsAPI

    app_config = Config(root)
    app_config.from_object('newsroom.news_api.settings')
    update_config(app_config)
    return NewsroomNewsAPI(config=app_config, testing=True)
def create_app(config_file=None, **kwargs): """ Create a new eve app object and initialize everything. User configuration can be loaded in the following order: 1. Use the `config_file` arg to specify a file 2. If `config_file` is `None`, you set the environment variable `AMIVAPI_CONFIG` to the path of your config file 3. If no environment variable is set either, `config.py` in the current working directory is used Args: config (path): Specify config file to use. kwargs: All other key-value arguments will be used to update the config Returns: (Eve): The Eve application """ # Load config config = Config(getcwd()) config.from_object("amivapi.settings") # Specified path > environment var > default path; abspath for better log user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py')) try: config.from_pyfile(user_config) config_status = "Config loaded: %s" % user_config except IOError: config_status = "No config found." config.update(kwargs) app = Eve(settings=config, validator=ValidatorAMIV) app.logger.info(config_status) # Set up error logging with sentry init_sentry(app) # Create LDAP connector ldap.init_app(app) # Initialize modules to register resources, validation, hooks, auth, etc. users.init_app(app) auth.init_app(app) events.init_app(app) groups.init_app(app) joboffers.init_app(app) beverages.init_app(app) studydocs.init_app(app) cascade.init_app(app) cron.init_app(app) documentation.init_app(app) # Fix that eve doesn't run hooks on embedded documents app.on_fetched_item += utils.run_embedded_hooks_fetched_item app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource return app
def get(pipeline_name):
    """Instantiate and return the pipeline registered under ``pipeline_name``.

    Looks the name up in the ``PIPELINES`` mapping of ``brainminer.settings``;
    returns ``None`` when no pipeline with that name is configured.
    """
    config = Config(None)
    config.from_object('brainminer.settings')
    pipelines = config['PIPELINES']
    # Membership test directly on the dict (no redundant .keys() view).
    if pipeline_name not in pipelines:
        return None
    # Single lookup of the pipeline spec instead of repeated indexing.
    spec = pipelines[pipeline_name]
    module = importlib.import_module(spec['module_path'])
    pipeline_cls = getattr(module, spec['class_name'])
    return pipeline_cls()
def _init_config():
    # Build the module-level config exactly once. After running, the function
    # removes itself from the module so it cannot be invoked a second time.
    import os
    import sys
    from flask import Config
    from . import defconfig
    config = Config(os.path.abspath(os.path.curdir))
    config.from_object(defconfig)
    # Self-destruct: delete this initializer from the module namespace.
    del sys.modules[__name__]._init_config
    return config
def create_app(config_file=None, **kwargs): """Create a new eve app object and initialize everything. User configuration can be loaded in the following order: 1. Use the `config_file` arg to specify a file 2. If `config_file` is `None`, you set the environment variable `PVK_CONFIG` to the path of your config file 3. If no environment variable is set either, `config.py` in the current working directory is used Args: config (path): Specify config file to use. kwargs: All other key-value arguments will be used to update the config Returns: (Eve): The Eve application """ # Load config config = Config(getcwd()) config.from_object("backend.settings") # Specified path > environment var > default path; abspath for better log user_config = abspath(config_file or getenv('PVK_CONFIG', 'config.py')) try: config.from_pyfile(user_config) config_status = "Config loaded: %s" % user_config except IOError: config_status = "No config found." config.update(kwargs) # Create the app object application = Eve(auth=APIAuth, validator=APIValidator, settings=config) application.logger.info(config_status) # Eve provides hooks at several points of the request, # we use this do add dynamic filtering for resource in ['signups', 'selections']: for method in ['GET', 'PATCH', 'DELETE']: event = getattr(application, 'on_pre_%s_%s' % (method, resource)) event += only_own_nethz # Also use hooks to add pre- and postprocessing to resources application.on_post_POST_signups += new_signups application.on_deleted_item_signups += deleted_signup application.on_updated_signups += patched_signup application.on_updated_courses += patched_course application.on_delete_item_courses += block_course_deletion application.on_insert_payments += create_payment application.on_inserted_payments += mark_as_paid application.on_deleted_item_payments += mark_as_unpaid return application
def setup_config(config):
    """Build a test configuration on top of the superdesk defaults.

    Returns the populated ``Config`` object with noisy loggers quieted.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object('superdesk.default_settings')
    update_config(app_config)
    app_config.update(
        config or {},
        APP_ABSPATH=app_abspath,
        DEBUG=True,
        TESTING=True,
    )

    # Raise log thresholds for chatty components (elastic = datalayer).
    for noisy in ('superdesk', 'elastic', 'elasticsearch', 'urllib3'):
        logging.getLogger(noisy).setLevel(logging.WARNING)

    return app_config
class ConfigurationRegistry(ModuleDiscoveryRegistry):
    """
    Specialized ``ModuleDiscoveryRegistry`` that search for ``config`` modules
    in a list of Python packages and merge them into the Flask application
    config without overwriting already set variables.

    :param app: A Flask application
    :param registry_namespace: The registry namespace of an
        ``ImportPathRegistry`` with a list Python packages to search for
        ``config`` modules in. Defaults to ``packages``.
    """

    def __init__(self, app, registry_namespace=None):
        super(ConfigurationRegistry, self).__init__(
            'config',
            registry_namespace=registry_namespace,
            with_setup=False,
        )
        # Create a new configuration module to collect configuration in.
        from flask import Config
        self.new_config = Config(app.config.root_path)
        # Auto-discover configuration in packages; each discovered module is
        # merged in via register() below.
        self.discover(app)
        # Overwrite default configuration with user specified configuration
        self.new_config.update(app.config)
        # Replace the app's config object wholesale with the merged one.
        app.config = self.new_config

    def register(self, new_object):
        """
        Register a new ``config`` module.

        :param new_object: The configuration module.
            ``app.config.from_object()`` will be called on it.
        """
        self.new_config.from_object(new_object)
        super(ConfigurationRegistry, self).register(new_object)

    def unregister(self, *args, **kwargs):
        """
        It is not possible to unregister configuration.
        """
        raise NotImplementedError()
def setup_config(config):
    """Build a test configuration on top of superdesk defaults plus an
    optional local ``settings.py``, merging ``INSTALLED_APPS`` from ``config``.

    Returns a deep-copied plain dict of the resulting settings.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object("superdesk.default_settings")

    # Look for a local settings.py from the cwd upward.
    cwd = Path.cwd()
    for p in [cwd] + list(cwd.parents):
        settings = p / "settings.py"
        if settings.is_file():
            logger.info(f"using local settings from {settings}")
            app_config.from_pyfile(settings)
            break
    else:
        logger.warning("Can't find local settings")

    update_config(app_config)
    app_config.setdefault("INSTALLED_APPS", [])

    # Extend the INSTALLED APPS with the list provided
    if config:
        config.setdefault("INSTALLED_APPS", [])
        app_config["INSTALLED_APPS"].extend(config.pop("INSTALLED_APPS", []))

    # Deduplicate INSTALLED_APPS while preserving first-seen order.
    # FIX: list(set(...)) produced an arbitrary, run-to-run-varying order,
    # making app initialization order non-deterministic.
    app_config["INSTALLED_APPS"] = list(dict.fromkeys(app_config["INSTALLED_APPS"]))

    app_config.update(
        config or {},
        **{
            "APP_ABSPATH": app_abspath,
            "DEBUG": True,
            "TESTING": True,
        },
    )

    logging.getLogger("apps").setLevel(logging.WARNING)
    logging.getLogger("elastic").setLevel(logging.WARNING)  # elastic datalayer
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("celery").setLevel(logging.WARNING)
    logging.getLogger("superdesk").setLevel(logging.ERROR)
    logging.getLogger("elasticsearch").setLevel(logging.ERROR)
    logging.getLogger("superdesk.errors").setLevel(logging.CRITICAL)

    return {key: deepcopy(val) for key, val in app_config.items()}
def setup_config(config):
    """Build a test configuration on top of the superdesk defaults.

    Returns the populated ``Config`` object with noisy loggers quieted.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object('superdesk.default_settings')
    update_config(app_config)
    app_config.update(
        config or {},
        APP_ABSPATH=app_abspath,
        DEBUG=True,
        TESTING=True,
    )

    # Raise log thresholds for chatty components.
    for name, level in (
        ('apps', logging.WARNING),
        ('elastic', logging.WARNING),  # elastic datalayer
        ('urllib3', logging.WARNING),
        ('celery', logging.WARNING),
        ('superdesk', logging.ERROR),
        ('elasticsearch', logging.ERROR),
    ):
        logging.getLogger(name).setLevel(level)

    return app_config
def setup_config(config):
    """Build the test app configuration and reduce logging noise.

    Starts from ``superdesk.tests.test_settings``, layers the test settings
    and any caller-supplied overrides, then forces DEBUG/TESTING on.
    """
    app_abspath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    app_config = Config(app_abspath)
    app_config.from_object('superdesk.tests.test_settings')
    app_config['APP_ABSPATH'] = app_abspath
    app_config.update(get_test_settings())
    app_config.update(config or {})
    app_config.update({'DEBUG': True, 'TESTING': True})

    # Raise log thresholds for chatty components (elastic = datalayer).
    for noisy in ('superdesk', 'elastic', 'elasticsearch', 'urllib3'):
        logging.getLogger(noisy).setLevel(logging.WARNING)

    return app_config
class ConfigurationRegistry(ModuleDiscoveryRegistry):
    """
    Specialized ``ModuleDiscoveryRegistry`` that search for ``config`` modules
    in a list of Python packages and merge them into the Flask application
    config without overwriting already set variables.

    :param app: A Flask application
    :param registry_namespace: The registry namespace of an
        ``ImportPathRegistry`` with a list Python packages to search for
        ``config`` modules in. Defaults to ``packages``.
    """

    def __init__(self, app, registry_namespace=None):
        super(ConfigurationRegistry, self).__init__(
            'config',
            registry_namespace=registry_namespace,
            with_setup=False,
        )
        # Create a new configuration module to collect configuration in.
        self.new_config = Config(app.config.root_path)
        # Auto-discover configuration in packages; each discovered module is
        # merged in via register() below.
        self.discover(app)
        # Overwrite default configuration with user specified configuration
        self.new_config.update(app.config)
        # Merge the collected config back into the existing app.config object
        # (kept in place rather than replaced).
        app.config.update(self.new_config)

    def register(self, new_object):
        """
        Register a new ``config`` module.

        :param new_object: The configuration module.
            ``app.config.from_object()`` will be called on it.
        """
        self.new_config.from_object(new_object)
        super(ConfigurationRegistry, self).register(new_object)

    def unregister(self, *args, **kwargs):
        """
        It is not possible to unregister configuration.
        """
        raise NotImplementedError()
def entrypoint(mode="flask"):
    """Application factory.

    Builds the Eve/Flask application and, depending on ``mode``, returns
    either the app itself ("flask") or the configured celery instance
    ("celery").
    """
    assert isinstance(mode, str), f"bad mode type '{type(mode)}'"
    assert mode in ('flask', 'celery'), f"bad mode '{mode}'"
    logger.info(
        f"Configuring {BaseConfig.APP_NAME} application with {mode} mode")
    logger.info("Configuring flask...")
    # Create a Flask Config object from APP_SETTINGS
    app_settings = BaseConfig.APP_SETTINGS
    config = Config('')
    config.from_object(app_settings)
    app = Eve(settings=config, validator=CustomValidator)
    CORS(app)
    # Finalize celery configuration
    configure_celery(app, celery)
    # Custom flask encoders
    app.json_encoder = CustomJSONEncoder
    # Set restplus custom routes blueprint
    bp = Blueprint('bp', __name__, url_prefix=f"/{BaseConfig.APP_NAME}")
    rp.init_app(bp)
    # rp.namespaces.clear()
    rp.add_namespace(analyzer)
    # Register custom routes Blueprint
    app.register_blueprint(bp)
    app.register_blueprint(swagger)
    app.config["SWAGGER_INFO"] = swagger_info
    app.logger = configure_logger(app.logger)
    # setting models attribute to flask
    setattr(app, "models", {})
    if mode == "flask":
        logger.info("Flask configuration successful!")
        return app
    if mode == "celery":
        # Celery mode still builds the full app above so the celery instance
        # is configured against it before being returned.
        logger.info("Celery configuration successful!")
        return celery
def main(expected):
    # Load defaults, then optional overrides from $OOISTATUS_SETTINGS.
    config = Config(here)
    config.from_object('ooi_status.default_settings')
    if 'OOISTATUS_SETTINGS' in os.environ:
        config.from_envvar('OOISTATUS_SETTINGS')
    # Log the effective configuration for operator visibility.
    for key in config:
        log.info('OOI_STATUS CONFIG: %r: %r', key, config[key])
    monitor = StatusMonitor(config)
    if expected:
        # One-shot mode: load the expected-status CSV and return.
        monitor.read_expected_csv(expected)
    else:
        # Daemon mode: run check/notify jobs forever (start() blocks).
        scheduler = BlockingScheduler()
        log.info('adding jobs')
        # notify on change every minute
        scheduler.add_job(monitor.check_all, 'cron', second=0)
        scheduler.add_job(monitor.notify_all, 'cron', second=10)
        log.info('starting jobs')
        scheduler.start()
class ConfigurationRegistry(ModuleDiscoveryRegistry):
    """
    Specialized import path registry that takes the initial list of import
    paths from ``PACKAGES`` configuration variable.

    Example::

        app.extensions['registry']['packages'] = PackageRegistry()
        app.extendsions['registry']['config'] = ConfigurationRegistry(
            _app, base_config='invenio.core.config'
        )
    """

    def __init__(self, app, registry_namespace=None):
        super(ConfigurationRegistry, self).__init__(
            'config',
            registry_namespace=registry_namespace,
            with_setup=False,
        )
        # Create a new configuration module to collect configuration in.
        from flask import Config
        self.new_config = Config(app.config.root_path)
        # Auto-discover configuration in packages; each discovered module is
        # merged in via register() below.
        self.discover(app)
        # Overwrite default configuration with user specified configuration
        self.new_config.update(app.config)
        # Replace the app's config object wholesale with the merged one.
        app.config = self.new_config

    def register(self, new_object):
        # Merge the module's uppercase attributes into the collected config.
        self.new_config.from_object(new_object)
        super(ConfigurationRegistry, self).register(new_object)

    def unregister(self, *args, **kwargs):
        # Configuration merging is one-way; removal is not supported.
        raise NotImplementedError()
def get_user_config():
    """Build the alerta server configuration.

    Layered, later sources winning: package defaults, then /etc/alertad.conf,
    then the file named by $ALERTA_SVR_CONF_FILE, then individual environment
    variable overrides below.
    """
    from flask import Config
    config = Config('/')
    config.from_object('alerta.settings')
    config.from_pyfile('/etc/alertad.conf', silent=True)
    config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True)

    # Presence of DEBUG in the environment enables it regardless of its value.
    if 'DEBUG' in os.environ:
        config['DEBUG'] = True
    if 'BASE_URL' in os.environ:
        config['BASE_URL'] = os.environ['BASE_URL']
    if 'SECRET_KEY' in os.environ:
        config['SECRET_KEY'] = os.environ['SECRET_KEY']

    database_url = (
        os.environ.get('DATABASE_URL', None) or
        # The following database settings are deprecated.
        os.environ.get('MONGO_URI', None) or
        os.environ.get('MONGODB_URI', None) or
        os.environ.get('MONGOHQ_URL', None) or
        os.environ.get('MONGOLAB_URI', None)
    )
    # Use app config for DATABASE_URL if no env var from above override it
    config['DATABASE_URL'] = database_url or config['DATABASE_URL']

    if 'DATABASE_NAME' in os.environ:
        config['DATABASE_NAME'] = os.environ['DATABASE_NAME']
    # Boolean flags compare against the literal string 'True'.
    if 'AUTH_REQUIRED' in os.environ:
        config['AUTH_REQUIRED'] = True if os.environ['AUTH_REQUIRED'] == 'True' else False
    # List-valued settings are comma-separated in the environment.
    if 'ADMIN_USERS' in os.environ:
        config['ADMIN_USERS'] = os.environ['ADMIN_USERS'].split(',')
    if 'CUSTOMER_VIEWS' in os.environ:
        config['CUSTOMER_VIEWS'] = True if os.environ['CUSTOMER_VIEWS'] == 'True' else False
    if 'OAUTH2_CLIENT_ID' in os.environ:
        config['OAUTH2_CLIENT_ID'] = os.environ['OAUTH2_CLIENT_ID']
    if 'OAUTH2_CLIENT_SECRET' in os.environ:
        config['OAUTH2_CLIENT_SECRET'] = os.environ['OAUTH2_CLIENT_SECRET']
    if 'ALLOWED_EMAIL_DOMAINS' in os.environ:
        config['ALLOWED_EMAIL_DOMAINS'] = os.environ['ALLOWED_EMAIL_DOMAINS'].split(',')
    if 'GITHUB_URL' in os.environ:
        config['GITHUB_URL'] = os.environ['GITHUB_URL']
    if 'ALLOWED_GITHUB_ORGS' in os.environ:
        config['ALLOWED_GITHUB_ORGS'] = os.environ['ALLOWED_GITHUB_ORGS'].split(',')
    if 'GITLAB_URL' in os.environ:
        config['GITLAB_URL'] = os.environ['GITLAB_URL']
    if 'ALLOWED_GITLAB_GROUPS' in os.environ:
        config['ALLOWED_GITLAB_GROUPS'] = os.environ['ALLOWED_GITLAB_GROUPS'].split(',')
    if 'KEYCLOAK_URL' in os.environ:
        config['KEYCLOAK_URL'] = os.environ['KEYCLOAK_URL']
    if 'KEYCLOAK_REALM' in os.environ:
        config['KEYCLOAK_REALM'] = os.environ['KEYCLOAK_REALM']
    if 'ALLOWED_KEYCLOAK_ROLES' in os.environ:
        config['ALLOWED_KEYCLOAK_ROLES'] = os.environ['ALLOWED_KEYCLOAK_ROLES'].split(',')
    # NOTE(review): every PINGFEDERATE_* value below is split(',') into a
    # list, including the token URL, pubkey location and token algorithm,
    # which look like they should be scalars — confirm the intended types.
    if 'PINGFEDERATE_OPENID_ACCESS_TOKEN_URL' in os.environ:
        config['PINGFEDERATE_OPENID_ACCESS_TOKEN_URL'] = os.environ['PINGFEDERATE_OPENID_ACCESS_TOKEN_URL'].split(',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_USERNAME' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_USERNAME'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_USERNAME'].split(',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_EMAIL' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_EMAIL'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_EMAIL'].split(',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_GROUP' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_GROUP'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_GROUP'].split(',')
    if 'PINGFEDERATE_PUBKEY_LOCATION' in os.environ:
        config['PINGFEDERATE_PUBKEY_LOCATION'] = os.environ['PINGFEDERATE_PUBKEY_LOCATION'].split(',')
    if 'PINGFEDERATE_TOKEN_ALGORITHM' in os.environ:
        config['PINGFEDERATE_TOKEN_ALGORITHM'] = os.environ['PINGFEDERATE_TOKEN_ALGORITHM'].split(',')
    if 'CORS_ORIGINS' in os.environ:
        config['CORS_ORIGINS'] = os.environ['CORS_ORIGINS'].split(',')
    if 'MAIL_FROM' in os.environ:
        config['MAIL_FROM'] = os.environ['MAIL_FROM']
    if 'SMTP_PASSWORD' in os.environ:
        config['SMTP_PASSWORD'] = os.environ['SMTP_PASSWORD']
    if 'PLUGINS' in os.environ:
        config['PLUGINS'] = os.environ['PLUGINS'].split(',')

    # Runtime config check
    if config['CUSTOMER_VIEWS'] and not config['AUTH_REQUIRED']:
        raise RuntimeError('Must enable authentication to use customer views')
    if config['CUSTOMER_VIEWS'] and not config['ADMIN_USERS']:
        raise RuntimeError('Customer views is enabled but there are no admin users')

    return config
import os from flask import Config from flask_sqlalchemy import SQLAlchemy from flask_bcrypt import Bcrypt from flask_mail import Mail from celery import Celery from oauthlib.oauth2 import WebApplicationClient from .api.common.base_definitions import BaseFlask # flask config conf = Config(root_path=os.path.abspath(os.path.dirname(__file__))) conf.from_object(os.getenv('APP_SETTINGS')) # instantiate the extensions db = SQLAlchemy() bcrypt = Bcrypt() mail = Mail() def create_app(): # instantiate the app app = BaseFlask(__name__) # set up extensions db.init_app(app) bcrypt.init_app(app) mail.init_app(app) # register blueprints from .api.v1.auth import auth_blueprints from .api.v1.user import user_blueprints from .api.v1.admin import admin_blueprints
* OpenShift * DATABASE_URI """ import os import json import re from distutils.util import strtobool from flask import Config from sqlalchemy.engine.url import URL config = Config('.') config.from_object('compair.settings') config.from_pyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../config.py'), silent=True) if os.environ.get('OPENSHIFT_MYSQL_DB_HOST'): config['SQLALCHEMY_DATABASE_URI'] = URL( 'mysql+pymysql', host=os.getenv('OPENSHIFT_MYSQL_DB_HOST', 'localhost'), port=os.getenv('OPENSHIFT_MYSQL_DB_PORT', '3306'), username=os.getenv('OPENSHIFT_MYSQL_DB_USERNAME', 'compair'), password=os.getenv('OPENSHIFT_MYSQL_DB_PASSWORD', 'compair'), database=os.getenv('OPENSHIFT_GEAR_NAME', 'compair'), ) elif os.environ.get('DB_HOST') or os.environ.get('DB_PORT') or os.environ.get('DB_USERNAME') \ or os.environ.get('DB_PASSWORD') or os.environ.get('DB_NAME'): config['SQLALCHEMY_DATABASE_URI'] = URL( os.getenv('DB_DRIVER', 'mysql+pymysql'),
def get_worker_username_and_password():
    """Return the (username, password) pair for the compute service worker."""
    settings = Config(None)
    settings.from_object('service.compute.settings')
    username = settings['SERVICE_WORKER_USERNAME']
    password = settings['SERVICE_WORKER_PASSWORD']
    return username, password
Currently the supported environment variables: * OpenShift * DATABASE_URI """ import os import json import re from distutils.util import strtobool from flask import Config from sqlalchemy.engine.url import URL config = Config('.') config.from_object('compair.settings') config.from_pyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../config.py'), silent=True) if os.environ.get('OPENSHIFT_MYSQL_DB_HOST'): config['SQLALCHEMY_DATABASE_URI'] = URL( 'mysql+pymysql', host=os.getenv('OPENSHIFT_MYSQL_DB_HOST', 'localhost'), port=os.getenv('OPENSHIFT_MYSQL_DB_PORT', '3306'), username=os.getenv('OPENSHIFT_MYSQL_DB_USERNAME', 'compair'), password=os.getenv('OPENSHIFT_MYSQL_DB_PASSWORD', 'compair'), database=os.getenv('OPENSHIFT_GEAR_NAME', 'compair'), ) elif os.environ.get('DB_HOST') or os.environ.get('DB_PORT') or os.environ.get('DB_USERNAME') \ or os.environ.get('DB_PASSWORD') or os.environ.get('DB_NAME'):
def create_app(config_file=None, **kwargs): """ Create a new eve app object and initialize everything. User configuration can be loaded in the following order: 1. Use the `config_file` arg to specify a file 2. If `config_file` is `None`, you set the environment variable `AMIVAPI_CONFIG` to the path of your config file 3. If no environment variable is set either, `config.py` in the current working directory is used Args: config (path): Specify config file to use. kwargs: All other key-value arguments will be used to update the config Returns: (Eve): The Eve application """ # Load config config = Config(getcwd()) config.from_object("amivapi.settings") # Specified path > environment var > default path; abspath for better log user_config = abspath(config_file or getenv('AMIVAPI_CONFIG', 'config.py')) try: config.from_pyfile(user_config) config_status = "Config loaded: %s" % user_config except IOError: config_status = "No config found." config.update(kwargs) # Initialize empty domain to create Eve object, register resources later config['DOMAIN'] = {} app = Eve("amivapi", # Flask needs this name to find the static folder settings=config, validator=ValidatorAMIV) app.logger.info(config_status) # Set up error logging with sentry init_sentry(app) # Create LDAP connector ldap.init_app(app) # Initialize modules to register resources, validation, hooks, auth, etc. users.init_app(app) auth.init_app(app) events.init_app(app) groups.init_app(app) blacklist.init_app(app) joboffers.init_app(app) beverages.init_app(app) studydocs.init_app(app) cascade.init_app(app) cron.init_app(app) documentation.init_app(app) # Fix that eve doesn't run hooks on embedded documents app.on_fetched_item += utils.run_embedded_hooks_fetched_item app.on_fetched_resource += utils.run_embedded_hooks_fetched_resource return app
from flask_oauthlib.client import OAuth
# FIX: the flask.ext.* namespace was removed in Flask 1.0 — import the
# flask_login package directly.
from flask_login import LoginManager, login_required, \
    login_user, logout_user

from matchmaker import app
from models import User, BotIdentity

# Wire everything up
login_manager = LoginManager()
login_manager.init_app(app)
oauth = OAuth(app)

# Casino GitHub OAuth: defaults from matchmaker.default_oauth, optionally
# overridden by the file named in $CASINO_OAUTH.
oauth_config = Config("")
oauth_config.from_object('matchmaker.default_oauth')
oauth_config.from_envvar('CASINO_OAUTH', silent=True)
github = oauth.remote_app(
    'github',
    **{k.lower(): v for k, v in oauth_config.items()}
)


@app.route('/login')
def login():
    """Redirect the user to GitHub's OAuth authorization page."""
    return github.authorize(
        callback=url_for('authorized', _external=True,
                         _scheme=app.config.get("PREFERRED_URL_SCHEME"))
    )
class ControlWorkers(object):
    """Spawn, stop and inspect asset worker processes for an environment."""

    def __init__(self, args):
        self.env = args.env
        self.action = args.action
        self.kill_delay = args.kill_delay
        self.conn = None

        # Load settings
        self.config = Config(os.path.dirname(os.path.realpath(__file__)))
        # There may be overiding settings specific to the server we are running on
        servername = socket.gethostname().split('.')[0]
        if servername and os.path.isfile(f'settings/workers/{self.env}_{servername}.py'):
            self.config.from_object(f'settings.workers.{self.env}_{servername}.Config')
        else:
            self.config.from_object(f'settings.workers.{self.env}.Config')

        # Redis: go through sentinel when configured, direct connection otherwise.
        if self.config['REDIS_USE_SENTINEL']:
            sentinel = redis.sentinel.Sentinel(
                self.config['REDIS_ADDRESS'],
                db=self.config['REDIS_DB'],
                password=self.config['REDIS_PASSWORD'],
                decode_responses=True
            )
            self.conn = sentinel.master_for(self.config['REDIS_SENTINEL_MASTER'])
        else:
            self.conn = redis.StrictRedis(
                host=self.config['REDIS_ADDRESS'][0],
                port=self.config['REDIS_ADDRESS'][1],
                db=self.config['REDIS_DB'],
                password=self.config['REDIS_PASSWORD'],
                decode_responses=True
            )

    def run(self):
        """Dispatch to the handler matching the requested action."""
        if self.action == 'status':
            self.status_workers()
        elif self.action == 'spawn':
            self.spawn_worker()
        elif self.action == 'stop':
            self.stop_workers()
        elif self.action == 'respawn':
            self.stop_workers()
            self.spawn_worker()

    def spawn_worker(self):
        """This will spawn an initial worker.

        More workers will be spawned as needed within limits set in config.
        """
        # Check we don't already have workers running
        procs = self.get_worker_processes()
        if procs:
            # FIX: p.pid is an int; ' '.join requires strings.
            pids = ' '.join(str(p.pid) for p in procs)
            sys.exit(
                "Not spawning new worker because running worker process(es) found "
                f"pids: {pids}"
            )
        r = subprocess.Popen(
            spawn_command,
            stdout=open(logfile_path, 'a'),
            stderr=open(logfile_path, 'a')
        )
        # NOTE(review): Popen.returncode stays None until the child is
        # poll()ed or wait()ed on, so this check only fires if the process
        # already terminated — confirm whether a poll()/wait() is intended.
        if r.returncode:
            print(f'Failed to spawn worker, return code {r.returncode}')
            print(f'There may be more info in {logfile_path}')
            sys.exit(1)

    def stop_workers(self):
        """Ask workers to shut down, then force-kill any survivors."""
        monitors.shutdown_workers(self.conn, AssetWorker, uuid.getnode())
        # Allow time for monitors to kill themselves
        while self.kill_delay > 0 and self.get_worker_processes():
            time.sleep(1)
            self.kill_delay -= 1
        # Forcibly kill any that remain
        procs = [p for p in psutil.process_iter()
                 if command_signature in ' '.join(p.cmdline())]
        if not procs:
            return
        # FIX: the loop previously iterated over `alive`, an undefined
        # name, which raised NameError before any process was killed.
        for p in procs:
            print(f"forcibly killing {p.pid}")
            p.kill()
        sys.exit(1)

    def respawn_workers(self):
        """Stop all workers and spawn a fresh one."""
        self.stop_workers()
        # FIX: the method was referenced without parentheses (a no-op);
        # it must actually be called.
        self.spawn_worker()

    def status_workers(self):
        """Print the worker count; exit 0 if any are running, else 1."""
        procs = self.get_worker_processes()
        print(f"{len(procs)} workers running")
        if procs:
            sys.exit(0)
        else:
            sys.exit(1)

    def get_worker_processes(self):
        """Return processes whose command line contains the worker signature."""
        return [p for p in psutil.process_iter()
                if command_signature in ' '.join(p.cmdline())]
def config():
    """Load and return the compute service settings as a ``Config`` object."""
    settings = Config(None)
    settings.from_object('service.compute.settings')
    return settings
class Configurator(object):
    """
    Object that takes care of loading the different configurations from the
    different sources. There are 3 types of settings:

    * Project: The basic set of settings needed by the system. These are
      shipped with Shiva.
    * Local: Specific to each instance, useful for overwriting system
      settings. Some of them must be defined before running Shiva, like the
      DB URI.
    * Debug: This setting will only be loaded if ``DEBUG`` is set to True in
      the local settings.

    There are also 3 different places where Shiva looks for this config
    files:

    * A ``local.py`` file inside the ``config/`` directory, relative to
      Shiva.
    * The ``$SHIVA_CONFIG`` environment variable. It's assumed to be
      pointing to a file (not a dir) if exists.
    * The ``$XDG_CONFIG_HOME/shiva/config.py`` file. If ``$XDG_CONFIG_HOME``
      is not set, defaults to ``$HOME/.config``, as defined by the `XDG Base
      Directory Specification
      <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest\
.html>`_.
    """

    def __init__(self):
        self._config = FlaskConfig("")
        # project
        _project = self.load_project()
        # local
        _xdg_config = self.from_xdg_config()
        _env = self.from_env()
        _local = self.from_local()
        # debug
        _debug = self.load_debug()
        # Mirror all uppercase config keys as attributes of this object.
        self.extract_conf(self._config)
        # At least one non-project source must have been loaded.
        if not (_xdg_config or _env or _local):
            raise NoConfigFoundError

    def load_project(self):
        # Project defaults shipped with Shiva.
        return self._config.from_object(project)

    def get_xdg_path(self):
        # $XDG_CONFIG_HOME, falling back to $HOME/.config; None if no $HOME.
        path_home = os.getenv("HOME")
        if not path_home:
            return None
        default_config_home = os.path.join(path_home, ".config")
        return os.getenv("XDG_CONFIG_HOME") or default_config_home

    def from_xdg_config(self):
        # Load $XDG_CONFIG_HOME/shiva/config.py if it exists.
        xdg_path = self.get_xdg_path()
        if not xdg_path:
            return False
        local_py = os.path.join(xdg_path, "shiva/config.py")
        if not os.path.exists(local_py):
            return False
        return self._config.from_pyfile(local_py)

    def from_env(self):
        # Load the file pointed to by $SHIVA_CONFIG if the variable is set.
        if not os.getenv("SHIVA_CONFIG"):
            return False
        return self._config.from_envvar("SHIVA_CONFIG")

    def from_local(self):
        # Load shiva.config.local; ImportError (module absent) means False.
        with ignored(ImportError):
            self._config.from_object("shiva.config.local")
            return True
        return False

    def load_debug(self):
        # Only applies when DEBUG was enabled by one of the earlier sources.
        if not self._config.get("DEBUG"):
            return False
        loaded = False
        with ignored(ImportError):
            from shiva.config import debug
            loaded = self._config.from_object(debug)
        xdg_path = self.get_xdg_path()
        if not xdg_path:
            return False
        debug_py = os.path.join(xdg_path, "shiva/debug.py")
        if not os.path.exists(debug_py):
            return False
        return self._config.from_pyfile(debug_py) or loaded

    def extract_conf(self, *args):
        """
        Receives one or more objects, iterates over their elements, extracts
        all the uppercase properties and injects them into the Configurator
        object.
        """
        for obj in args:
            for key in dir(obj):
                if key.isupper():
                    setattr(self, key, getattr(obj, key))
            # Mapping-like objects (py2 dicts) expose iterkeys; copy their
            # uppercase entries as well.
            if hasattr(obj, "iterkeys") and callable(getattr(obj, "iterkeys")):
                for key in obj.iterkeys():
                    if key.isupper():
                        setattr(self, key, obj[key])
def get_all():
    """Load the compute-service settings and return its pipeline table."""
    settings = Config(None)
    settings.from_object('service.compute.settings')
    return settings['PIPELINES']
import os
from functools import wraps

import celery
from depot.manager import DepotManager
from flask import Config

from coding_challenge_restful.core.s3 import get_file_from_s3
from coding_challenge_restful.extensions import db, AsyncTask, s3_client, BulkCSVUpload

# Application settings are loaded once, at import time, from the settings
# class named below.
config_name = 'coding_challenge_restful.settings.Config'
config = Config("")
config.from_object(config_name)


class CeleryBaseTask(celery.Task):
    """Common base class for this project's Celery tasks."""

    def on_success(self, retval, task_id, args, kwargs):
        """Worker-side hook run after the task executes successfully.

        Arguments:
            retval (Any): The return value of the task.
            task_id (str): Unique id of the executed task.
            args (Tuple): Original arguments for the executed task.
            kwargs (Dict): Original keyword arguments for the executed task.

        Returns:
            None: The return value of this handler is ignored.
        """
        print("Success")
class Configurator(object):
    """
    Object that takes care of loading the different configurations from the
    different sources. There are 3 types of settings:

    * Project: The basic set of settings needed by the system. These are
      shipped with Shiva.
    * Local: Specific to each instance, useful for overwriting system
      settings. Some of them must be defined before running Shiva, like the
      DB URI.
    * Debug: This setting will only be loaded if ``DEBUG`` is set to True in
      the local settings.

    There are also 3 different places where Shiva looks for this config
    files:

    * A ``local.py`` file inside the ``config/`` directory, relative to
      Shiva.
    * The ``$SHIVA_CONFIG`` environment variable. It's assumed to be
      pointing to a file (not a dir) if exists.
    * The ``$XDG_CONFIG_HOME/shiva/config.py`` file. If ``$XDG_CONFIG_HOME``
      is not set, defaults to ``$HOME/.config``, as defined by the `XDG Base
      Directory Specification
      <http://standards.freedesktop.org/basedir-spec/basedir-spec-latest\
.html>`_.
    """

    def __init__(self):
        self._config = FlaskConfig('')

        # project
        self.load_project()

        # local
        _xdg_config = self.from_xdg_config()
        _env = self.from_env()
        _local = self.from_local()

        # debug
        self.load_debug()

        self.extract_conf(self._config)

        # At least one local source must have provided settings.
        if not (_xdg_config or _env or _local):
            raise NoConfigFoundError

    def load_project(self):
        """Load the settings shipped with Shiva."""
        return self._config.from_object(project)

    def get_xdg_path(self):
        """Return the XDG config home, or None when ``$HOME`` is unset."""
        # FIX: os.path.join(None, '.config') raised TypeError when $HOME was
        # not set (e.g. daemon environments); return None and let callers
        # skip the XDG sources instead.
        path_home = os.getenv('HOME')
        if not path_home:
            return None

        default_config_home = os.path.join(path_home, '.config')

        return os.getenv('XDG_CONFIG_HOME') or default_config_home

    def from_xdg_config(self):
        """Load ``$XDG_CONFIG_HOME/shiva/config.py`` if present."""
        xdg_path = self.get_xdg_path()
        if not xdg_path:
            return False

        local_py = os.path.join(xdg_path, 'shiva/config.py')
        if not os.path.exists(local_py):
            return False

        return self._config.from_pyfile(local_py)

    def from_env(self):
        """Load the file pointed to by ``$SHIVA_CONFIG`` if set."""
        if not os.getenv('SHIVA_CONFIG'):
            return False

        return self._config.from_envvar('SHIVA_CONFIG')

    def from_local(self):
        """Load ``shiva.config.local`` if the module exists."""
        with ignored(ImportError):
            self._config.from_object('shiva.config.local')

            return True

        # Reached only when the import failed and was suppressed.
        return False

    def load_debug(self):
        """Load debug settings; only active when ``DEBUG`` is truthy."""
        if not self._config.get('DEBUG'):
            return False

        loaded = False
        with ignored(ImportError):
            from shiva.config import debug

            loaded = self._config.from_object(debug)

        xdg_path = self.get_xdg_path()
        if not xdg_path:
            # FIX: previously a missing XDG path discarded a successful load
            # from ``shiva.config.debug``.
            return loaded

        debug_py = os.path.join(xdg_path, 'shiva/debug.py')
        if not os.path.exists(debug_py):
            return loaded

        return self._config.from_pyfile(debug_py) or loaded

    def extract_conf(self, *args):
        """
        Receives one or more objects, iterates over their elements, extracts
        all the uppercase properties and injects them into the Configurator
        object.
        """
        for obj in args:
            for key in dir(obj):
                if key.isupper():
                    setattr(self, key, getattr(obj, key))

            # FIX: mapping-style configs (e.g. flask.Config) expose their
            # settings as keys, not attributes. The old ``iterkeys`` check
            # never matched on Python 3, so config values were silently
            # skipped; ``keys`` works on both Python 2 and 3.
            if hasattr(obj, 'keys') and callable(getattr(obj, 'keys')):
                for key in obj.keys():
                    if key.isupper():
                        setattr(self, key, obj[key])
* DATABASE Currently the supported environment variables: * OpenShift * DATABASE_URI """ import os from flask import Config from sqlalchemy.engine.url import URL config = Config('.') config.from_object('acj.settings') config.from_pyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../config.py'), silent=True) if os.environ.get('OPENSHIFT_MYSQL_DB_HOST'): config['SQLALCHEMY_DATABASE_URI'] = URL({ 'drivername': 'mysql', 'host': os.environ.get('OPENSHIFT_MYSQL_DB_HOST'), 'port': os.environ.get('OPENSHIFT_MYSQL_DB_PORT'), 'username': os.environ.get('OPENSHIFT_MYSQL_DB_USERNAME'), 'password': os.environ.get('OPENSHIFT_MYSQL_DB_PASSWORD'), 'database': os.environ.get('OPENSHIFT_GEAR_NAME'), }) elif os.environ.get('DATABASE_URI'): config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URI') elif "DATABASE" in config and 'DATABASE_URI' not in config: config['SQLALCHEMY_DATABASE_URI'] = URL(**config['DATABASE'])
* DATABASE

Currently the supported environment variables:

* OpenShift
* DATABASE_URI
"""
import os

from flask import Config
from sqlalchemy.engine.url import URL

# Layered configuration: package defaults first, then an optional
# instance-level ../config.py override.
config = Config(".")
config.from_object("compair.settings")
config.from_pyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../config.py"), silent=True)

if os.environ.get("OPENSHIFT_MYSQL_DB_HOST"):
    # OpenShift deployment: assemble the SQLAlchemy URL from the platform's
    # MySQL environment variables, defaulting to the "compair" credentials.
    config["SQLALCHEMY_DATABASE_URI"] = URL(
        "mysql+pymysql",
        host=os.getenv("OPENSHIFT_MYSQL_DB_HOST", "localhost"),
        port=os.getenv("OPENSHIFT_MYSQL_DB_PORT", "3306"),
        username=os.getenv("OPENSHIFT_MYSQL_DB_USERNAME", "compair"),
        password=os.getenv("OPENSHIFT_MYSQL_DB_PASSWORD", "compair"),
        database=os.getenv("OPENSHIFT_GEAR_NAME", "compair"),
    )
elif (
    # NOTE(review): this excerpt is cut off mid-condition — the closing
    # parenthesis and the body of this elif lie outside the visible chunk.
    os.environ.get("DB_HOST")
    or os.environ.get("DB_PORT")
    or os.environ.get("DB_USERNAME")
def get_user_config():
    """Assemble the Alerta server configuration.

    Sources are layered, later ones winning: package defaults
    (``alerta.settings``), then ``/etc/alertad.conf``, then the file named
    by ``$ALERTA_SVR_CONF_FILE``, then individual environment-variable
    overrides.

    Returns:
        flask.Config: the fully resolved configuration mapping.

    Raises:
        RuntimeError: if customer views are enabled without authentication
            or without any admin users.
    """
    from flask import Config
    config = Config('/')

    config.from_object('alerta.settings')
    config.from_pyfile('/etc/alertad.conf', silent=True)
    config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True)

    # --- core server settings ------------------------------------------
    if 'DEBUG' in os.environ:
        config['DEBUG'] = True
    if 'BASE_URL' in os.environ:
        config['BASE_URL'] = os.environ['BASE_URL']
    if 'USE_PROXYFIX' in os.environ:
        # only the literal string 'True' enables the flag
        config['USE_PROXYFIX'] = True if os.environ['USE_PROXYFIX'] == 'True' else False
    if 'SECRET_KEY' in os.environ:
        config['SECRET_KEY'] = os.environ['SECRET_KEY']

    # First non-empty env var wins; the Mongo-specific names are legacy.
    database_url = (
        os.environ.get('DATABASE_URL', None) or
        # The following database settings are deprecated.
        os.environ.get('MONGO_URI', None) or
        os.environ.get('MONGODB_URI', None) or
        os.environ.get('MONGOHQ_URL', None) or
        os.environ.get('MONGOLAB_URI', None)
    )
    # Use app config for DATABASE_URL if no env var from above override it
    config['DATABASE_URL'] = database_url or config['DATABASE_URL']

    if 'DATABASE_NAME' in os.environ:
        config['DATABASE_NAME'] = os.environ['DATABASE_NAME']

    # --- authentication -------------------------------------------------
    if 'AUTH_REQUIRED' in os.environ:
        config['AUTH_REQUIRED'] = True if os.environ['AUTH_REQUIRED'] == 'True' else False
    if 'AUTH_PROVIDER' in os.environ:
        config['AUTH_PROVIDER'] = os.environ['AUTH_PROVIDER']
    if 'ADMIN_USERS' in os.environ:
        config['ADMIN_USERS'] = os.environ['ADMIN_USERS'].split(',')
    if 'SIGNUP_ENABLED' in os.environ:
        config['SIGNUP_ENABLED'] = True if os.environ['SIGNUP_ENABLED'] == 'True' else False
    if 'CUSTOMER_VIEWS' in os.environ:
        config['CUSTOMER_VIEWS'] = True if os.environ['CUSTOMER_VIEWS'] == 'True' else False

    # --- OAuth2 / OpenID providers --------------------------------------
    if 'OAUTH2_CLIENT_ID' in os.environ:
        config['OAUTH2_CLIENT_ID'] = os.environ['OAUTH2_CLIENT_ID']
    if 'OAUTH2_CLIENT_SECRET' in os.environ:
        config['OAUTH2_CLIENT_SECRET'] = os.environ['OAUTH2_CLIENT_SECRET']
    if 'ALLOWED_EMAIL_DOMAINS' in os.environ:
        config['ALLOWED_EMAIL_DOMAINS'] = os.environ['ALLOWED_EMAIL_DOMAINS'].split(',')
    if 'AZURE_TENANT' in os.environ:
        config['AZURE_TENANT'] = os.environ['AZURE_TENANT']
    if 'GITHUB_URL' in os.environ:
        config['GITHUB_URL'] = os.environ['GITHUB_URL']
    if 'ALLOWED_GITHUB_ORGS' in os.environ:
        config['ALLOWED_GITHUB_ORGS'] = os.environ['ALLOWED_GITHUB_ORGS'].split(',')
    if 'GITLAB_URL' in os.environ:
        config['GITLAB_URL'] = os.environ['GITLAB_URL']
    if 'ALLOWED_GITLAB_GROUPS' in os.environ:
        # GitLab groups are mapped onto the generic OIDC roles setting.
        config['ALLOWED_OIDC_ROLES'] = os.environ['ALLOWED_GITLAB_GROUPS'].split(',')
    if 'KEYCLOAK_URL' in os.environ:
        config['KEYCLOAK_URL'] = os.environ['KEYCLOAK_URL']
    if 'KEYCLOAK_REALM' in os.environ:
        config['KEYCLOAK_REALM'] = os.environ['KEYCLOAK_REALM']
    if 'ALLOWED_KEYCLOAK_ROLES' in os.environ:
        # Keycloak roles likewise map onto ALLOWED_OIDC_ROLES.
        config['ALLOWED_OIDC_ROLES'] = os.environ['ALLOWED_KEYCLOAK_ROLES'].split(',')
    if 'OIDC_ISSUER_URL' in os.environ:
        config['OIDC_ISSUER_URL'] = os.environ['OIDC_ISSUER_URL']
    if 'ALLOWED_OIDC_ROLES' in os.environ:
        config['ALLOWED_OIDC_ROLES'] = os.environ['ALLOWED_OIDC_ROLES'].split(',')

    # --- PingFederate ---------------------------------------------------
    # NOTE(review): every PingFederate value is split on ',' — presumably to
    # mirror the other list settings; confirm that single-value settings
    # such as the token algorithm are really meant to be lists.
    if 'PINGFEDERATE_OPENID_ACCESS_TOKEN_URL' in os.environ:
        config['PINGFEDERATE_OPENID_ACCESS_TOKEN_URL'] = os.environ['PINGFEDERATE_OPENID_ACCESS_TOKEN_URL'].split(
            ',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_USERNAME' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_USERNAME'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_USERNAME'].split(
            ',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_EMAIL' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_EMAIL'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_EMAIL'].split(',')
    if 'PINGFEDERATE_OPENID_PAYLOAD_GROUP' in os.environ:
        config['PINGFEDERATE_OPENID_PAYLOAD_GROUP'] = os.environ['PINGFEDERATE_OPENID_PAYLOAD_GROUP'].split(',')
    if 'PINGFEDERATE_PUBKEY_LOCATION' in os.environ:
        config['PINGFEDERATE_PUBKEY_LOCATION'] = os.environ['PINGFEDERATE_PUBKEY_LOCATION'].split(',')
    if 'PINGFEDERATE_TOKEN_ALGORITHM' in os.environ:
        config['PINGFEDERATE_TOKEN_ALGORITHM'] = os.environ['PINGFEDERATE_TOKEN_ALGORITHM'].split(',')

    # --- misc ------------------------------------------------------------
    if 'CORS_ORIGINS' in os.environ:
        config['CORS_ORIGINS'] = os.environ['CORS_ORIGINS'].split(',')
    if 'MAIL_FROM' in os.environ:
        config['MAIL_FROM'] = os.environ['MAIL_FROM']
    if 'SMTP_PASSWORD' in os.environ:
        config['SMTP_PASSWORD'] = os.environ['SMTP_PASSWORD']
    if 'GOOGLE_TRACKING_ID' in os.environ:
        config['GOOGLE_TRACKING_ID'] = os.environ['GOOGLE_TRACKING_ID']
    if 'PLUGINS' in os.environ:
        config['PLUGINS'] = os.environ['PLUGINS'].split(',')

    # Runtime config check
    if config['CUSTOMER_VIEWS'] and not config['AUTH_REQUIRED']:
        raise RuntimeError('Must enable authentication to use customer views')

    if config['CUSTOMER_VIEWS'] and not config['ADMIN_USERS']:
        raise RuntimeError('Customer views is enabled but there are no admin users')

    return config
    threads -- number of threads to use.
    """
    with contextlib.closing(multiprocessing.Pool(threads)) as threads_pool:
        threads_pool.map(functools.partial(_update_collection, collection = 'variants', reader = ), get_file_contig_pairs(variants_files))
'''


if __name__ == '__main__':
    # NOTE(review): ``global`` at module level is a no-op in Python; these
    # declarations only document that the names assigned below are module
    # globals (presumably read by the loader functions defined above —
    # confirm against the rest of the file).
    global mongo_host
    global mongo_port
    global mongo_db_name

    args = argparser.parse_args()

    # Configuration is layered: package defaults, then an optional local
    # config.py, then the file named by $BRAVO_CONFIG_FILE.
    config = Config(os.path.dirname(os.path.realpath(__file__)))
    # Load default config
    config.from_object('config.default')
    # Load instance configuration if exists
    config.from_pyfile('config.py', silent=True)
    # Load configuration file specified in BRAVO_CONFIG_FILE environment variable if exists
    config.from_envvar('BRAVO_CONFIG_FILE', silent=True)

    # Connection settings for the variant store; the MONGO mapping carries
    # host/port/name keys.
    mongo_host = config['MONGO']['host']
    mongo_port = config['MONGO']['port']
    mongo_db_name = config['MONGO']['name']
    igv_cache_collection_name = config['IGV_CACHE_COLLECTION']

    if args.command == 'genes':
        sys.stdout.write('Start loading genes to {} database.\n'.format(mongo_db_name))
        load_gene_models(args.canonical_transcripts_file, args.omim_file, args.genenames_file, args.gencode_file)
from flask import Config
from os import path

# Load application settings from the project-level ``config`` module.
config = Config('../')
config.from_object('config')

# FIX: ``StandardError`` only exists on Python 2 (raising it is a NameError
# on Python 3), and comparing a boolean with ``is False`` is fragile; use
# ``not`` and a built-in exception available on both major versions.
if not path.exists(config["IMAGEMAGICK_CONVERT"]):
    raise RuntimeError("No imagemagick 'convert' found. Check your config or/and install imagemagick.")

from ifsApprover.DB import DB

db = DB(config["DB_NAME"])