def load_config():
    """Load the application configuration.

    Merges the defaults with the file named by the LINTREVIEW_SETTINGS
    environment variable if it is set, otherwise with ./settings.py.

    :raises ImportError: when neither configuration source exists.
    """
    cwd = os.getcwd()
    config = Config(cwd)
    if 'LINTREVIEW_SETTINGS' in os.environ:
        config.from_envvar('LINTREVIEW_SETTINGS')
    elif os.path.exists(os.path.join(cwd, 'settings.py')):
        config.from_pyfile('settings.py')
    else:
        raise ImportError(
            "Unable to load configuration file. Please "
            "either create ./settings.py or set LINTREVIEW_SETTINGS "
            "in your environment before running.")

    log_conf = config.get('LOGGING_CONFIG')
    if log_conf:
        logging.config.fileConfig(log_conf, disable_existing_loggers=False)

    ca_bundle = config.get('SSL_CA_BUNDLE')
    if ca_bundle:
        # Make requests trust the configured CA bundle.
        os.environ['REQUESTS_CA_BUNDLE'] = ca_bundle
    return config
def load_config(settings_file='./test_settings.py'):
    """Load the pullsbury configuration.

    Merges the defaults with the file named by PULLSBURY_SETTINGS if that
    environment variable is set, otherwise with *settings_file*.
    JSON-encoded settings are decoded into Python objects in place.
    """
    config = Config(os.getcwd())
    if 'PULLSBURY_SETTINGS' in os.environ:
        config.from_envvar('PULLSBURY_SETTINGS')
    else:
        config.from_pyfile(settings_file)

    logging_config = config.get('LOGGING_CONFIG')
    if logging_config:
        logging.config.fileConfig(
            logging_config, disable_existing_loggers=False)

    # These settings are stored as JSON strings; decode each one
    # (defaulting to an empty object when the key is absent).
    for key in ('TEAMS', 'HAPPY_SLACK_EMOJIS', 'REPO_BLACKLIST',
                'SLACK_CUSTOM_EMOJI_MAPPING'):
        config[key] = json.loads(config.get(key, '{}'))
    return config
def main(ctx, verbosity):
    """CLI entry point: load and validate config, stash it on the click context."""
    configure_logger(verbosity)

    # Load the configurations from file
    config = Config(root_path=".")
    config.from_object(default_settings)
    config.from_envvar(ZUBBI_SETTINGS_ENV)

    # Validate the configuration: exactly one of the two tenant-source
    # settings must be provided (both set, or neither set, is an error).
    sources_repo = config.get("TENANT_SOURCES_REPO")
    sources_file = config.get("TENANT_SOURCES_FILE")
    if bool(sources_repo) == bool(sources_file):
        raise ScraperConfigurationError(
            "Either one of 'TENANT_SOURCES_REPO' "
            "and 'TENANT_SOURCES_FILE' must be set, "
            "but not both."
        )

    # Store the config in click's context object to be available for subcommands
    ctx.obj = {"config": config}

    if ctx.invoked_subcommand is None:
        ctx.invoke(scrape)
def app_config(postgres_user_conf):
    """Build a testing Config wired to the given postgres connection settings."""
    from flask.config import Config
    from datacat.settings import testing

    config = Config('')
    config.from_object(testing)
    # Point the app at the per-test database fixture.
    config['DATABASE'] = postgres_user_conf
    return config
def _get_config():
    """Build a standalone Config before the app is initialized.

    Only needed/used in top-level and class statements.
    https://stackoverflow.com/a/18138250/7597273
    """
    package_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..'))
    cfg = Config(package_root)
    cfg.from_object('config')
    return cfg
class Settings(object):
    """Application settings container backed by a Flask ``Config``.

    The ``ConfigAttribute`` descriptors proxy attribute access through
    to ``self.config``, so settings can be read and written as plain
    attributes on instances of this class.
    """

    # Shared Config instance that the ConfigAttribute descriptors read/write.
    config = Config(__name__)

    DEBUG = ConfigAttribute('DEBUG')
    APP_NAME = ConfigAttribute('APP_NAME')
    DJAODJIN_SECRET_KEY = ConfigAttribute('DJAODJIN_SECRET_KEY')

    def update(self, **updates):
        """Merge keyword arguments into the underlying config mapping."""
        return self.config.update(updates)
def configs(self):
    """Lazily load and cache the app configuration.

    Loads every 'settings.py' resource found by the ResourceLoader into
    a single Config; raises when no configuration file is available.
    """
    if not hasattr(self, '_configs'):
        loaded = Config(ROOT)
        # NOTE: 'get_resoure' is the (misspelled) loader API name.
        resource = ResourceLoader.get().get_resoure('settings.py')
        paths = resource.as_list()
        if not paths:
            raise Exception('need a configuration file to start app')
        for config_path in paths:
            loaded.from_pyfile(config_path)
        self._configs = loaded
    return self._configs
def download_users():
    """Download the users.xml file named by USERS_URL in deploy.cfg.

    The payload is written to runtime/data/users.xml only when the
    server responds with HTTP 200.
    """
    import urllib2
    from contextlib import closing
    from flask.config import Config

    config = Config(etc())
    config.from_pyfile("deploy.cfg")

    users_xml = os.path.join('runtime', 'data', 'users.xml')
    # closing() guarantees the HTTP response is released even on error;
    # urllib2 responses are not context managers on their own, and the
    # original code leaked the connection.
    with closing(urllib2.urlopen(config['USERS_URL'])) as response:
        if response.code == 200:
            with open(users_xml, 'w') as f:
                f.write(response.read())
def read_config() -> Config:
    """Read settings from a python file, then overlay environment variables.

    The settings file path comes from DEBUGPROXY_SETTING_FILE (defaulting
    to './config/default_settings.py'). Any config key that also exists as
    a non-empty environment variable is overridden by the environment
    value (note: such overrides are always strings).
    """
    config = Config("")
    settings_file = os.environ.get('DEBUGPROXY_SETTING_FILE',
                                   './config/default_settings.py')
    config.from_pyfile(settings_file)

    # Environment variables win over file values; empty/unset are ignored.
    for key in list(config):
        override = os.environ.get(key)
        if override:
            config[key] = override
    return config
def __init__(self, basedir, db=None):
    """Create a container using a given base directory for the storage
    and socket. Optionally, provide a database object to use that
    instead of creating a new one (to, for example, reuse its internal
    locks).
    """
    self.basedir = os.path.abspath(basedir)

    # Reuse Flask's simple configuration component: packaged defaults
    # first, optionally overridden by a local buildbot.cfg file.
    self.config = Config(self.basedir)
    self.config.from_object('buildbot.config_default')
    self.config.from_pyfile('buildbot.cfg', silent=True)

    # Adopt the caller's database when one was supplied (truthiness
    # check preserved from the original), otherwise create our own.
    self.db = db if db else JobDB(self.basedir)
def load_config(config_obj=None):
    """
    Load Greenwave configuration.

    It will load the configuration based on how the environment is
    configured (TEST / DEV / DOCS flags select defaults).

    :param config_obj: optional dotted path of a config object; when
        None it is derived from the environment.
    :return: A dict of Greenwave configuration.
    """
    # Load default config, then override that with a config file
    config = Config(__name__)

    # Resolve the environment flags once; they drive both the default
    # config object and the default settings-file location.
    testing = os.getenv('TEST') == 'true'
    dev = os.getenv('DEV') == 'true'
    docs = os.getenv('DOCS') == 'true'

    if config_obj is None:
        if testing:
            config_obj = 'greenwave.config.TestingConfig'
        elif dev or docs:
            config_obj = 'greenwave.config.DevelopmentConfig'
        else:
            config_obj = 'greenwave.config.ProductionConfig'

    if testing:
        default_config_file = os.path.join(os.getcwd(), 'conf', 'settings.py.example')
    elif dev:
        default_config_file = os.path.join(os.getcwd(), 'conf', 'settings.py')
    elif docs:
        default_config_file = os.path.normpath(
            os.path.join(os.getcwd(), '..', 'conf', 'settings.py.example'))
    else:
        default_config_file = '/etc/greenwave/settings.py'

    # 1. Load default configuration.
    log.debug("config: Loading config from %r", config_obj)
    config.from_object(config_obj)

    # 2. Override default configuration with environment variables.
    if os.environ.get('GREENWAVE_SUBJECT_TYPES_DIR'):
        config['SUBJECT_TYPES_DIR'] = os.environ['GREENWAVE_SUBJECT_TYPES_DIR']

    if os.environ.get('GREENWAVE_POLICIES_DIR'):
        config['POLICIES_DIR'] = os.environ['GREENWAVE_POLICIES_DIR']

    # 3. Override default configuration and environment variables with
    #    a custom config file.
    config_file = os.environ.get('GREENWAVE_CONFIG', default_config_file)
    log.debug("config: Extending config with %r", config_file)
    config.from_pyfile(config_file)

    if os.environ.get('SECRET_KEY'):
        config['SECRET_KEY'] = os.environ['SECRET_KEY']

    return config
def test_settings_abs_path(self):
    """Check if the config obj is updated with default_settings when it
    is passed as a python file absolute path
    """
    abs_path = getcwd() + '/arachne/tests/test_settings.py'
    test_app = self.create_app(settings=abs_path)

    # load config from pyfile
    flask_app = Flask(__name__)
    flask_app.config.from_object('arachne.default_settings')

    config_cls = Config(__name__)
    config_cls.from_pyfile(abs_path)

    # update config with the arachne default settings
    flask_app.config.update(config_cls)

    # test if config dicts are same
    # (assertEquals is a deprecated alias, removed in Python 3.12)
    self.assertEqual(test_app.config, flask_app.config)
def test_settings_abs_path(self):
    """Check if the config obj is updated with default_settings when it
    is passed as a python file absolute path
    """
    abs_path = getcwd() + '/arachneserver/tests/test_settings.py'
    test_app = self.create_app(settings=abs_path)

    # since the object initialized created is always different
    # we ignore CRAWLER_PROCESS setting for test
    # if SCRAPY_VERSION >= (1, 0, 0):
    del test_app.config['CRAWLER_PROCESS']

    # load config from pyfile
    flask_app = Flask(__name__)
    flask_app.config.from_object('arachneserver.default_settings')

    config_cls = Config(__name__)
    config_cls.from_pyfile(abs_path)

    # update config with the server default settings
    flask_app.config.update(config_cls)

    # test if config dicts are same
    # (assertEquals is a deprecated alias, removed in Python 3.12)
    self.assertEqual(test_app.config, flask_app.config)
def make_config():
    """Return a Config built from packaged defaults, optionally
    overridden by the file named in DATACAT_SETTINGS (if set)."""
    config = Config('')
    config.from_object('datacat.settings.default')
    config.from_envvar('DATACAT_SETTINGS', silent=True)
    return config
def load_config(self):
    """Load config.py from the parent of the app root and copy every
    option onto this object as attributes."""
    config = Config(current_app.root_path)
    config_path = path.join(path.dirname(current_app.root_path), 'config.py')
    config.from_pyfile(config_path)
    for name, val in config.items():
        setattr(self, name, val)
def load_default_config_from_pyfile(self, filename):
    """Load *filename* and apply its values as defaults: keys already
    present in self.config are left untouched."""
    defaults = Config(root_path=self.root_path)
    defaults.from_pyfile(filename)
    for key, value in defaults.items():
        self.config.setdefault(key, value)
import os
import datetime

from flask_login import UserMixin
from flask.config import Config
# from werkzeug.security import check_password_hash, generate_password_hash
from itsdangerous import URLSafeTimedSerializer, BadSignature, SignatureExpired

from pytwis import Pytwis, pytwis_constants

from .config import config_by_name

# BUGBUG: Read the configuration of the Flask app again since we can't
# find a way to access the configuration outside an application context.
# PYTWASK_ENV selects which config class to use ('dev' by default).
config_name = os.getenv('PYTWASK_ENV', 'dev')  # pylint: disable=invalid-name
app_config = Config(None)  # pylint: disable=invalid-name
app_config.from_object(config_by_name[config_name])

# Connect to the local Redis database.
twis = Pytwis(hostname=app_config['REDIS_DB_HOSTNAME'],  # pylint: disable=invalid-name
              port=app_config['REDIS_DB_PORT'],
              socket=app_config['REDIS_DB_SOCKET'],
              db=app_config['REDIS_DB_INDEX'],
              password=app_config['REDIS_DB_PASSWORD'])


class Tweet():  # pylint: disable=too-few-public-methods
    """This 'Tweet' class encapsulates all the information related to one tweet.

    Note that it will convert the posted UNIX timestamp into a datetime.
    """

    # NOTE(review): the remainder of this class lies outside the visible
    # chunk; only the __init__ signature is shown here.
    def __init__(self, username, post_unix_time, body):
def __init__(self, config=None):
    """Initialise with an empty Config; when *config* is given, its
    entries are merged on top of the empty configuration."""
    self.config = Config()
    if config is not None:
        self.config.update(config)
from __future__ import unicode_literals

import os

from flask import Flask
from flask.config import Config
from flaskext.csrf import csrf

# config: defaults first, then environment-specific overrides selected by
# the RENTMYBIKE_ENV variable (e.g. 'rentmybike.settings.<env>').
config = Config(None, Flask.default_config)
config.from_object('rentmybike.settings.default')
if os.getenv('RENTMYBIKE_ENV'):
    config.from_object('rentmybike.settings.' + os.getenv('RENTMYBIKE_ENV'))

# app
from application import RentMyBike  # deferred: kept below the config setup

app = RentMyBike()
if app.config['DUMMY_DATA']:
    app.add_dummy_data()
csrf(app)

# controllers
import controllers  # deferred: imported after `app` exists
from flask.config import Config
import os

# Dotted path of the data-science config class; overridable via the
# DS_SETTINGS environment variable.
ds_settings = os.getenv(
    "DS_SETTINGS",
    "project.config.data_science_config.DsDevelopmentConfig")

# Standalone config object (no root path is needed for from_object).
ds_config = Config(None)
ds_config.from_object(ds_settings)
from flask.config import Config
import os

# Resolve the application directory: parent of this file's directory.
APP_DIR = os.path.abspath(os.path.dirname(__file__))
APP_DIR = os.path.abspath(os.path.join(APP_DIR, os.pardir))
api_dir = os.path.join(APP_DIR, '')

config = Config(api_dir)

# CONFIG may hold the raw config file contents; the write-out path below
# is currently disabled (commented out).
config_value = os.environ.get('CONFIG')
config_file = ''
# if config_value:
#     print(f'** config: \n{config_value}')
#     config_filename = "_config"
#     full_config_name = config_filename + '.py'
#     config_file = os.path.join(api_dir, full_config_name)
#     print(f'** write to {config_file}')
#     with open(config_file, "w") as file:
#         file.write(config_value)
#         file.close()
# else:

# Fall back to a named config module in the api directory; the file name
# (without '.py') comes from CONFIG_NAME, defaulting to 'config'.
config_filename = os.environ.get('CONFIG_NAME') or 'config'
full_config_name = config_filename + '.py'
config_file = os.path.join(api_dir, full_config_name)
print(f'-> config: {config_file}')

# log_dir = os.path.join(api_dir, os.pardir, 'logs')
# if not os.path.exists(log_dir):
#     os.makedirs(log_dir)
#
import datetime

from flask_login import UserMixin
from flask.config import Config
# from werkzeug.security import check_password_hash, generate_password_hash
from itsdangerous import URLSafeTimedSerializer, BadSignature, SignatureExpired

import os

from pytwis import Pytwis

from .config import config_by_name

# BUGBUG: Read the configuration of the Flask app again since we can't
# find a way to access the configuration outside an application context.
# PYTWASK_ENV selects which config class to use ('dev' by default).
config_name = os.getenv('PYTWASK_ENV', 'dev')
app_config = Config(None)
app_config.from_object(config_by_name[config_name])

# Connect to the local Redis database.
twis = Pytwis(hostname=app_config['REDIS_DB_HOSTNAME'],
              port=app_config['REDIS_DB_PORT'],
              db=app_config['REDIS_DB_INDEX'],
              password=app_config['REDIS_DB_PASSWORD'])


class Tweet():
    """Tweet class"""

    def __init__(self, username, post_unix_time, body):
        self.username = username
        # Convert the posted UNIX timestamp into a datetime for display.
        self.post_datetime = datetime.datetime.fromtimestamp(post_unix_time)
        self.body = body
},
    # NOTE(review): this mapping continues a logging dictConfig whose
    # opening lines are outside the visible chunk.
    'short': {
        'format': '[BIS %(asctime)s%(msecs)d-%(levelname).1s][%(name)-9s]%(message)s',
        'datefmt': '%M%S'  # miliseconds will be added by the format string
    },
    'long': {
        'format': '[%(asctime)s:%(levelname)s][%(name)-10s]%(message)s'
    }
},
'handlers': {
    'console': {
        'class': 'logging.StreamHandler',
        'formatter': 'simple',
        'stream': 'ext://sys.stderr'
    }
},
'loggers': {
    'harpgest': {
        'handlers': ['console'],
        'level': logging.INFO,
    }
}
}

# Runtime configuration: defaults from DefaultConfig, optionally
# overridden by an app.cfg file in the current working directory.
config = Config(os.getcwd())
config.from_object(DefaultConfig)
config.from_pyfile(os.path.join(os.getcwd(), 'app.cfg'), silent=True)
def _get_config():
    """Return a Config loaded from the 'config' module, rooted at the
    parent directory of this file's package."""
    here = os.path.dirname(__file__)
    cfg = Config(os.path.abspath(os.path.join(here, '..')))
    cfg.from_object('config')
    return cfg
import boto3
from boto.s3.connection import S3Connection
from depot.fields.sqlalchemy import UploadedFileField
import inflection
from flask.config import Config
from flask_migrate import Migrate
from sqlalchemy import Column, Integer, String
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy_wrapper import SQLAlchemy

from sa_types import EnumChoiceType

# Load settings from the packaged Config class (standalone, no app context).
config_name = 'coding_challenge_restful.settings.Config'
config = Config('')
config.from_object(config_name)

s3_client = boto3.client(
    's3',
    # Hard coded strings as credentials, not recommended.
    # NOTE(review): prefer IAM roles or environment-based credentials.
    aws_access_key_id=config['AWS_ACCESS_KEY'],
    aws_secret_access_key=config['AWS_SECRET_KEY'])

# Explicit transaction isolation level passed through to the engine.
isolation_level = 'READ COMMITTED'
db = SQLAlchemy(uri=config['DATABASE_URL'], isolation_level=isolation_level)
migrate = Migrate(compare_type=True)

# Create Models
Model = db.Model
def make_config(self, configuration):
    """Create self.config from the class defaults, then overlay the
    given configuration object when one is provided (truthy)."""
    self.config = Config(None, self.default_config)
    if configuration:
        self.config.from_object(configuration)
from flask.config import Config

from mabolab.core.global_obj import Global

# Absolute path to the shared central configuration file.
CENTRAL_CONFIG = 'C:/MTP/mabotech/maboss1.3.0/maboss/conf/maboss_config.py'

settings = Config("")
settings.from_pyfile(CENTRAL_CONFIG)
settings['APP_NAME'] = "monitor_bli"

# Global factory provides database handles and a logger from the settings.
g = Global(settings)

db = g.get_db('postgresql')
ora = g.get_db('oracle')
log = g.get_logger()


def dbtest(serialno):
    # Query Oracle for the latest workstation-42700 status of serialno.
    # NOTE(review): the serial number is interpolated directly into the
    # SQL string -- vulnerable to SQL injection if serialno is untrusted;
    # consider bind parameters.
    sql = """select status, lastupdateon from cob_t_serial_no_workstation where serialno = '%s' and workstation = '42700' order by id desc""" % (serialno)
    rtn = ora.execute(sql)
    # Python 2 print statement (this module is Python 2 code).
    print rtn.fetchone()
from urllib.parse import urlparse

import numpy as np
import imagehash
from jinja2 import Environment, PackageLoader
import luigi
from luigi.contrib import redis_store
import networkx as nx
from PIL import Image
from flask.config import Config
import requests
import twarc

import json2csv

# Load task configuration from dnflow.cfg next to this module.
config = Config(os.path.dirname(__file__))
config.from_pyfile('dnflow.cfg')

# Quiet noisy loggers; only warnings and above are emitted.
logging.getLogger().setLevel(logging.WARN)
logging.getLogger('').setLevel(logging.WARN)
logging.getLogger('luigi-interface').setLevel(logging.WARN)


def time_hash(digits=6):
    """Generate an arbitrary hash based on the current time for filenames.

    Returns '<YYYYMMDDHHMM>-<hex>' where <hex> is the first *digits*
    characters of a SHA-1 digest of time.time().
    """
    # Renamed from 'hash' to avoid shadowing the builtin of the same name.
    sha = hashlib.sha1()
    sha.update(str(time.time()).encode())
    t = time.localtime()
    dt = '%s%02d%02d%02d%02d' % (t.tm_year, t.tm_mon, t.tm_mday,
                                 t.tm_hour, t.tm_min)
    return '%s-%s' % (dt, sha.hexdigest()[:digits])
#!/usr/bin/env python import os import sys import time import logging from flask.config import Config import boto.sqs from boto.sqs.message import RawMessage from boto import exception LOG = logging.getLogger('alerta.sqs') config = Config('/') config.from_pyfile('/etc/alertad.conf', silent=True) config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True) DEFAULT_AWS_REGION = 'eu-west-1' DEFAULT_AWS_SQS_QUEUE = 'alerts' AWS_REGION = os.environ.get('AWS_REGION') or config.get( 'AWS_REGION', DEFAULT_AWS_REGION) AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') or config.get( 'AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY') or config.get( 'AWS_SECRET_ACCESS_KEY') AWS_SQS_QUEUE = os.environ.get('AWS_SQS_QUEUE') or config.get( 'AWS_SQS_QUEUE', DEFAULT_AWS_SQS_QUEUE)
from flask.config import Config

from celery.utils.log import get_task_logger
from ckanpackager.tasks.url_package_task import UrlPackageTask
from ckanpackager.tasks.datastore_package_task import DatastorePackageTask
from ckanpackager.tasks.dwc_archive_package_task import DwcArchivePackageTask

# Defaults from the package, then mandatory overrides from the file named
# by CKANPACKAGER_CONFIG (from_envvar without silent=True raises when the
# variable is unset).
config = Config(__file__)
config.from_object('ckanpackager.config_defaults')
config.from_envvar('CKANPACKAGER_CONFIG')

# Celery is imported after the config is loaded, since the app's broker
# URL comes from the configuration.
from celery import Celery

app = Celery('tasks', broker=config['CELERY_BROKER'])
app.conf.CELERY_DISABLE_RATE_LIMITS = True
app.conf.CELERY_ACCEPT_CONTENT = ['json']
app.conf.CELERY_TASK_SERIALIZER = 'json'
app.conf.CELERY_CREATE_MISSING_QUEUES = True
app.conf.CELERY_DEFAULT_QUEUE = 'slow'


@app.task
def run_task(task, request):
    """ Run/enqueue the given task for the given request

    Note that the request should be validated before this is called.

    @param task: Name of the task. One of package_url, package_dwc_archive
                 or package_datastore
    @param request: Dictionary containing the request
    """