def includeme(config):
    """Configure request logging: scan views and install ignore patterns."""
    settings = config.registry.settings
    config.scan('pyramid_request_log.request_log')

    if 'pyramid_request_log.pattern' in settings:
        keys = aslist(settings['pyramid_request_log.pattern'])
        if keys:
            log.info('Pyramid-Request-Log will ignore keys: %s', keys)
            # Each key becomes an alternation group in a single regex.
            request_log.unlog_pattern = re.compile(
                '({})'.format(')|('.join(keys)))
        else:
            log.info('Pyramid-Request-Log will ignore no key: '
                     'variable define but empty')
    else:
        log.warning('No pyramid_request_log.pattern found in settings')

    if 'pyramid_request_log.ignore_route' in settings:
        routes = aslist(settings['pyramid_request_log.ignore_route'])
        if routes:
            log.info('Pyramid-Request-Log will ignore routes: %s', routes)
            request_log.unlog_route = re.compile(
                '({})'.format(')|('.join(routes)))
        else:
            log.info('Pyramid-Request-Log will ignore no route: '
                     'variable define but empty')

    key = 'pyramid_request_log.authenticated_id'
    if key in settings:
        request_log.authenticated_id = settings[key]
def load_settings(registry):
    """Build a pyramid_swagger Settings tuple from registry settings."""
    get = registry.settings.get
    return Settings(
        swagger12_handler=build_swagger12_handler(
            get('pyramid_swagger.schema12')),
        swagger20_handler=build_swagger20_handler(),
        # Validation toggles all default to enabled.
        validate_request=asbool(
            get('pyramid_swagger.enable_request_validation', True)),
        validate_response=asbool(
            get('pyramid_swagger.enable_response_validation', True)),
        validate_path=asbool(
            get('pyramid_swagger.enable_path_validation', True)),
        exclude_paths=get_exclude_paths(registry),
        exclude_routes=set(
            aslist(get('pyramid_swagger.exclude_routes') or [])),
        prefer_20_routes=set(
            aslist(get('pyramid_swagger.prefer_20_routes') or [])),
    )
def depsjs(request):
    """Serve a Closure Library deps.js file built from the configured roots."""
    settings = request.registry.settings
    pyramid_closure = settings.get("pyramid_closure")

    # Roots may come from a structured 'pyramid_closure' dict or from a
    # flat 'pyramid_closure.roots' string setting.
    if pyramid_closure:
        roots = pyramid_closure.get("roots")
    else:
        roots = settings.get("pyramid_closure.roots")
    if roots is None:
        roots = []
    elif isinstance(roots, basestring):
        roots = aslist(roots)

    path_to_source = {}
    for root in roots:
        path_to_source.update(depswriter._GetRelativePathToSourceDict(root))

    if pyramid_closure:
        roots_with_prefix = pyramid_closure.get("roots_with_prefix")
    else:
        roots_with_prefix = settings.get("pyramid_closure.roots_with_prefix")
    if roots_with_prefix is None:
        roots_with_prefix = []
    elif isinstance(roots_with_prefix, basestring):
        roots_with_prefix = [aslist(roots_with_prefix)]

    # roots_with_prefix alternates (prefix, root) pairs.
    for prefix, root in pairwise(roots_with_prefix):
        path_to_source.update(
            depswriter._GetRelativePathToSourceDict(
                root, prefix=request.static_url(prefix)))

    request.response.content_type = 'text/javascript'
    return depswriter.MakeDepsFile(path_to_source)
def includeme(config):
    """Initialize the cliquet Service class and run the startup sequence."""
    settings = config.get_settings()

    # Add CORS settings to the base cliquet Service class.
    Service.cors_origins = tuple(aslist(settings["cliquet.cors_origins"]))
    Service.default_cors_headers = ("Backoff", "Retry-After", "Alert",
                                    "Content-Length")
    Service.error_handler = lambda self, e: errors.json_error_handler(e)

    # Heartbeat registry.
    config.registry.heartbeats = {}

    # Public settings registry.
    config.registry.public_settings = {"cliquet.batch_max_requests"}

    # Setup components: each step is a dotted name resolved to a callable
    # that receives the configurator.
    for step in aslist(settings["cliquet.initialization_sequence"]):
        config.maybe_dotted(step)(config)

    # Setup cornice.
    config.include("cornice")

    # Scan views.
    config.scan("cliquet.views")

    # Give sign of life.
    logger.info("%(cliquet.project_name)s %(cliquet.project_version)s "
                "starting." % settings)
def includeme(config):
    """Register the Tonnikala renderer factory and its config directives."""
    registry = config.registry
    if hasattr(registry, "tonnikala_renderer_factory"):
        # Already configured; includeme() must be idempotent.
        return

    registry.tonnikala_renderer_factory = TonnikalaRendererFactory()
    config.add_directive("add_tonnikala_extensions", add_tonnikala_extensions)
    config.add_directive("add_tonnikala_search_paths",
                         add_tonnikala_search_paths)
    config.add_directive("set_tonnikala_reload", set_tonnikala_reload)

    settings = registry.settings

    def as_iterable(value):
        # Accept either a real iterable or a whitespace-separated string.
        return value if is_nonstr_iter(value) else aslist(value, flatten=True)

    if "tonnikala.extensions" in settings:
        config.add_tonnikala_extensions(
            *as_iterable(settings["tonnikala.extensions"]))

    if "tonnikala.search_paths" in settings:
        config.add_tonnikala_search_paths(
            *as_iterable(settings["tonnikala.search_paths"]))

    # Fall back to Pyramid's global template-reload flag when Tonnikala's
    # own setting is absent.
    tk_reload = settings.get("tonnikala.reload")
    if tk_reload is None:
        tk_reload = settings.get("pyramid.reload_templates")
    config.set_tonnikala_reload(asbool(tk_reload))
def includeme(config):
    """Load defaults, configure the Service class and run the init sequence."""
    load_default_settings(config, DEFAULT_SETTINGS)
    settings = config.get_settings()

    # Add CORS settings to the base cliquet Service class.
    Service.cors_origins = tuple(aslist(settings['cliquet.cors_origins']))
    Service.default_cors_headers = ('Backoff', 'Retry-After', 'Alert')

    # Heartbeat registry.
    config.registry.heartbeats = {}

    # Setup components: each step is a dotted name resolved to a callable
    # that receives the configurator.
    for step in aslist(settings['cliquet.initialization_sequence']):
        config.maybe_dotted(step)(config)

    # Setup cornice.
    config.include("cornice")

    # Scan views.
    config.scan("cliquet.views")

    # Give sign of life.
    logger.info("%(cliquet.project_name)s %(cliquet.project_version)s "
                "starting." % settings)
def configure(cls, settings):
    """Parse app settings into constructor kwargs for this backend.

    Honors the deprecated 'auth.zero_security_mode' flag (mapped onto
    default_read/default_write) and builds the user -> groups mapping
    from 'group.<name>' settings entries.
    """
    kwargs = super(ConfigAccessBackend, cls).configure(settings)
    if asbool(settings.get('auth.zero_security_mode', False)):
        # Fixed: the closing parenthesis was missing from this message.
        LOG.warn("Using deprecated option 'auth.zero_security_mode' "
                 "(replaced by 'pypi.default_read' and "
                 "'pypi.default_write')")
        kwargs['default_read'] = [Everyone]
        kwargs['default_write'] = [Authenticated]
    kwargs['settings'] = settings
    kwargs['admins'] = aslist(settings.get('auth.admins', []))
    user_groups = defaultdict(list)
    group_map = {}
    # Build dict that maps users to list of groups
    for key, value in settings.iteritems():
        if not key.startswith('group.'):
            continue
        group_name = key[len('group.'):]
        members = aslist(value)
        group_map[group_name] = members
        for member in members:
            user_groups[member].append(group_name)
    kwargs['group_map'] = group_map
    kwargs['user_groups'] = user_groups
    return kwargs
def includeme(config):
    """Plug daybed-browserid to daybed"""
    settings = config.get_settings()

    # Both settings are mandatory for security reasons; refuse to start
    # without them.
    for required in ('browserid.audiences', 'browserid.trusted_issuers'):
        if required not in settings:
            raise ConfigurationError(
                'Missing %s settings. This is needed for '
                'security reasons. See https://developer.mozilla.org/en-US/docs/'
                'Persona/Security_Considerations for details.' % required)

    config.registry['browserid.verifier_url'] = settings.get(
        "browserid.verifier_url", None)
    config.registry['browserid.audiences'] = aslist(
        settings['browserid.audiences'])
    config.registry['browserid.trusted_issuers'] = aslist(
        settings['browserid.trusted_issuers'])

    # Create a backend: an explicit 'browserid.backend' setting wins,
    # otherwise derive the class path from the configured daybed backend.
    default_backend = settings['daybed.backend'].replace(
        'daybed', 'daybed_browserid')
    backend_class = config.maybe_dotted(
        settings.get('browserid.backend', default_backend))
    config.registry.browserid_db = backend_class.load_from_config(config)

    config.scan("daybed_browserid.views")
def factory(handler, registry):
    """Tween factory: assemble the parsing configuration and return the tween."""
    get = morph.pick(registry.settings, prefix=CONFIG_PREFIX).get

    def globlist(key):
        # Compile a whitespace-separated list of exact glob expressions.
        return [globre.compile(expr, globre.EXACT)
                for expr in aslist(get(key, []))]

    conf = aadict()
    conf.enabled = asbool(get('enabled', True))
    conf.include = globlist('include')
    conf.exclude = globlist('exclude')
    conf.reparse = aslist(get('reparse-methods', DEFAULT_REPARSE_METHODS))
    conf.name = get('attribute-name', DEFAULT_ATTRIBUTE_NAME)
    conf.deep = asbool(get('combine.deep', True))
    conf.reqdict = asbool(get('require-dict', True))
    conf.failunk = asbool(get('fail-unknown', True))
    conf.ndict = asbool(get('native-dict', False))

    conf.error = get('error-handler', None)
    if conf.error:
        conf.error = asset.symbol(conf.error)

    # Per-format enable flags; YAML defaults to enabled only when a yaml
    # module was importable or a parser is explicitly configured.
    conf.xfmt = asbool(get('xml.enable', True))
    conf.jfmt = asbool(get('json.enable', True))
    conf.yfmt = asbool(get('yaml.enable', bool(yaml or get('yaml.parser'))))
    if conf.jfmt:
        conf.jparser = get('json.parser', None)
        if conf.jparser:
            conf.jparser = asset.symbol(conf.jparser)
    if conf.yfmt:
        conf.yparser = asset.symbol(get('yaml.parser', 'yaml.load'))
    if conf.xfmt:
        conf.xparser = asset.symbol(
            get('xml.parser', 'xml.etree.ElementTree.fromstring'))

    def input_tween(request):
        return process(handler, request, conf)
    return input_tween
def __init__(self, storage, cache_servers=None, cache_key_prefix="",
             cache_pool_size=None, cache_pool_timeout=60,
             cached_collections=(), cache_only_collections=(),
             cache_lock=False, cache_lock_ttl=None, **kwds):
    """Wrap a storage backend with memcached-based collection caches."""
    self.storage = storage
    self.cache = MemcachedClient(cache_servers, cache_key_prefix,
                                 cache_pool_size, cache_pool_timeout)

    # Collections whose data is mirrored in the cache.
    self.cached_collections = {
        name: CachedManager(self, name)
        for name in aslist(cached_collections)
    }
    # Collections stored exclusively in the cache.
    self.cache_only_collections = {
        name: CacheOnlyManager(self, name)
        for name in aslist(cache_only_collections)
    }

    self.cache_lock = cache_lock
    self.cache_lock_ttl = (DEFAULT_CACHE_LOCK_TTL
                           if cache_lock_ttl is None else cache_lock_ttl)
    # Keep a threadlocal to track the currently-held locks.
    # This is needed to make the read locking API reentrant.
    self._tldata = threading.local()
def configure(cls, settings):
    """Load class-level access configuration from app settings."""
    super(ConfigAccessBackend, cls).configure(settings)
    cls._settings = settings
    cls.zero_security_mode = asbool(
        settings.get('auth.zero_security_mode', False))
    cls.admins = aslist(settings.get('auth.admins', []))
    cls.user_groups = defaultdict(list)
    cls.group_map = {}

    if cls.zero_security_mode:
        # Everyone may read; any authenticated user may write.
        cls.ROOT_ACL = [
            (Allow, Everyone, 'login'),
            (Allow, Everyone, 'read'),
            (Allow, Authenticated, 'write'),
            (Allow, 'admin', ALL_PERMISSIONS),
            (Deny, Everyone, ALL_PERMISSIONS),
        ]
    else:
        cls.ROOT_ACL = IAccessBackend.ROOT_ACL

    # Build dict that maps users to list of groups
    group_prefix = 'group.'
    for key, value in settings.iteritems():
        if not key.startswith(group_prefix):
            continue
        group_name = key[len(group_prefix):]
        members = aslist(value)
        cls.group_map[group_name] = members
        for member in members:
            cls.user_groups[member].append(group_name)
def setup_listeners(config):
    """Wire configured event listeners to resource events.

    Each name in the 'event_listeners' setting is resolved either as a
    dotted module path or as an alias pointing at an
    'event_listeners.<name>.use' setting.
    """
    write_actions = (ACTIONS.CREATE, ACTIONS.UPDATE, ACTIONS.DELETE)
    settings = config.get_settings()
    listeners = aslist(settings['event_listeners'])
    for name in listeners:
        # Fixed: the %r placeholder previously had no argument.
        logger.info('Setting up %r listener', name)
        prefix = 'event_listeners.%s.' % name
        try:
            listener_mod = config.maybe_dotted(name)
            prefix = 'event_listeners.%s.' % name.split('.')[-1]
            listener = listener_mod.load_from_config(config, prefix)
        except (ImportError, AttributeError):
            # Not a dotted path: treat it as an alias whose module comes
            # from the '<prefix>use' setting.
            listener_mod = config.maybe_dotted(settings[prefix + 'use'])
            listener = listener_mod.load_from_config(config, prefix)
        actions = aslist(settings.get(prefix + 'actions', '')) or write_actions
        resource_names = aslist(settings.get(prefix + 'resources', ''))
        decorated = _filter_events(listener, actions, resource_names)
        if ACTIONS.READ in actions:
            config.add_subscriber(decorated, ResourceRead)
            if len(actions) == 1:
                # Fixed: this was 'return', which aborted setup of every
                # remaining listener. A read-only listener only needs to
                # skip the ResourceChanged subscription.
                continue
        config.add_subscriber(decorated, ResourceChanged)
def __init__(
    self,
    api_version: str = "v1",
    api_name: str = None,
    default_limit: int = 100,
    max_limit: int = 1000,
    writable_attributes: List[str] = None,
    read_only_attributes: List[str] = None,
    allowed_origins: Tuple[str] = None,
    disable_cors: bool = False,
    authenticate_fn: str = None,
) -> None:
    """Store normalized API configuration values on the instance."""
    self.api_version = api_version
    self.api_name = api_name
    self.authenticate_fn = authenticate_fn
    self.default_limit = int(default_limit)
    self.max_limit = int(max_limit)
    # Normalize attribute lists: None becomes an empty list, anything
    # else is run through aslist().
    self.writable_attributes: List[str] = (
        [] if writable_attributes is None else aslist(writable_attributes)
    )
    self.read_only_attributes: List[str] = (
        [] if read_only_attributes is None else aslist(read_only_attributes)
    )
    self.disable_cors = asbool(disable_cors)
    # None stays None; a non-empty value is split into a list.
    self.allowed_origins = allowed_origins and aslist(allowed_origins)
def configure(cls, settings):
    """Assemble users, groups, admins and package permissions from settings."""
    kwargs = super(ConfigAccessBackend, cls).configure(settings)

    # 'user.<name>' entries hold password hashes.
    users = {key[len("user."):]: value
             for key, value in six.iteritems(settings)
             if key.startswith("user.")}

    # 'group.<name>' entries hold whitespace-separated member lists.
    groups = {key[len("group."):]: aslist(value)
              for key, value in six.iteritems(settings)
              if key.startswith("group.")}

    # 'package.<name>.(user|group).<entity>' entries assign short-form
    # permissions to that entity for the package.
    packages = {}
    for key, value in six.iteritems(settings):
        pieces = key.split(".")
        if len(pieces) != 4 or pieces[0] != "package":
            continue
        _, package, mode, entity = pieces
        pkg_perms = packages.setdefault(package, {"users": {}, "groups": {}})
        if mode == "user":
            pkg_perms["users"][entity] = cls._perms_from_short(value)
        elif mode == "group":
            pkg_perms["groups"][entity] = cls._perms_from_short(value)

    kwargs["data"] = {
        "users": users,
        "admins": aslist(settings.get("auth.admins", [])),
        "groups": groups,
        "packages": packages,
    }
    return kwargs
def setup_listeners(config):
    """Register configured event listeners as Pyramid subscribers.

    Listener modules come from the 'event_listeners' setting; per-listener
    options (module, actions, resources) may come from settings or from
    environment variables via utils.read_env().
    """
    # Register basic subscriber predicates, to filter events.
    config.add_subscriber_predicate('for_actions', EventActionFilter)
    config.add_subscriber_predicate('for_resources', EventResourceFilter)

    write_actions = (ACTIONS.CREATE, ACTIONS.UPDATE, ACTIONS.DELETE)
    settings = config.get_settings()
    project_name = settings.get('project_name', '')
    listeners = aslist(settings['event_listeners'])

    for name in listeners:
        logger.info('Setting up %r listener' % name)
        prefix = 'event_listeners.%s.' % name

        try:
            listener_mod = config.maybe_dotted(name)
            prefix = 'event_listeners.%s.' % name.split('.')[-1]
            listener = listener_mod.load_from_config(config, prefix)
        except (ImportError, AttributeError):
            module_setting = prefix + "use"
            # Read from ENV or settings.
            module_value = utils.read_env(project_name + "." + module_setting,
                                          settings.get(module_setting))
            listener_mod = config.maybe_dotted(module_value)
            listener = listener_mod.load_from_config(config, prefix)

        # If StatsD is enabled, monitor execution time of listeners.
        if getattr(config.registry, "statsd", None):
            statsd_client = config.registry.statsd
            key = 'listeners.%s' % name
            listener = statsd_client.timer(key)(listener.__call__)

        # Optional filter by event action.
        actions_setting = prefix + "actions"
        # Read from ENV or settings.
        actions_value = utils.read_env(project_name + "." + actions_setting,
                                       settings.get(actions_setting, ""))
        actions = aslist(actions_value)
        if len(actions) > 0:
            actions = ACTIONS.from_string_list(actions)
        else:
            actions = write_actions

        # Optional filter by event resource name.
        resource_setting = prefix + "resources"
        # Read from ENV or settings.
        resource_value = utils.read_env(project_name + "." + resource_setting,
                                        settings.get(resource_setting, ""))
        resource_names = aslist(resource_value)

        # Pyramid event predicates.
        options = dict(for_actions=actions, for_resources=resource_names)

        if ACTIONS.READ in actions:
            config.add_subscriber(listener, ResourceRead, **options)
            if len(actions) == 1:
                # Fixed: this was 'return', which silently skipped setup
                # of every remaining listener. A read-only listener only
                # needs to skip the ResourceChanged subscription.
                continue
        config.add_subscriber(listener, ResourceChanged, **options)
def _parse_settings(settings):
    """Extract rawes client keyword arguments from 'rawes.*' settings.

    Returns a dict of arguments suitable for constructing a rawes Elastic
    client, falling back to the defaults below for unset keys.
    """
    defaults = {
        'url': 'http://localhost:9200',
        'timeout': 30,
        'path': '',
        'json_encoder': encode_date_optional_time,
    }
    # Note: the previous 'rawes_args = {}' before this copy was dead code.
    rawes_args = defaults.copy()

    # set string settings
    for short_key_name in ('path',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = \
                settings.get(key_name, defaults.get(short_key_name))

    # set list settings: a multi-element value stays a list, a single
    # element collapses to a stripped string.
    for short_key_name in ('url',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            value = settings.get(key_name, defaults.get(short_key_name))
            values = aslist(value)
            rawes_args[short_key_name] = (
                values if len(values) > 1 else value.strip())

    # integer settings
    for short_key_name in ('timeout',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = \
                int(settings.get(key_name, defaults.get(short_key_name)))

    # function settings, resolved from dotted names
    resolver = DottedNameResolver()
    for short_key_name in ('json_encoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = resolver.resolve(
                settings.get(key_name))
    for short_key_name in ('json_decoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            # The decoder setting names a class; instantiate it and use
            # its bound decode method.
            rawes_args[short_key_name] = resolver.resolve(
                settings.get(key_name))().decode

    # removed settings
    for short_key_name in ('connection_type', 'except_on_error'):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            # Fixed: the setting name was never interpolated into the
            # warning message (the %s was passed through literally).
            warnings.warn(
                '%s is no longer supported, please remove from your '
                'settings.' % key_name,
                UserWarning
            )
    return rawes_args
def parse_options_from_settings(settings, settings_prefix, maybe_dotted):
    """ Parse options for use with Mako's TemplateLookup from settings."""
    def sget(name, default=None):
        return settings.get(settings_prefix + name, default)

    # Fall back to Pyramid's global reload flag when no prefixed one is set.
    reload_templates = sget('reload_templates', None)
    if reload_templates is None:
        reload_templates = settings.get('pyramid.reload_templates', None)
    reload_templates = asbool(reload_templates)

    directories = sget('directories', [])
    if not is_nonstr_iter(directories):
        # Since we parse a value that comes from an .ini config,
        # we treat whitespaces and newline characters equally as list item separators.
        directories = aslist(directories, flatten=True)
    directories = [abspath_from_asset_spec(d) for d in directories]

    module_directory = sget('module_directory', None)
    if module_directory is not None:
        module_directory = abspath_from_asset_spec(module_directory)

    error_handler = sget('error_handler', None)
    if error_handler is not None:
        error_handler = maybe_dotted(error_handler)

    default_filters = sget('default_filters', 'h')
    if default_filters is not None and not is_nonstr_iter(default_filters):
        default_filters = aslist(default_filters)

    imports = sget('imports', None)
    if imports is not None and not is_nonstr_iter(imports):
        # Note: deliberately not flattened, matching prior behavior.
        imports = aslist(imports, flatten=False)

    future_imports = sget('future_imports', None)
    if future_imports is not None and not is_nonstr_iter(future_imports):
        future_imports = aslist(future_imports)

    preprocessor = sget('preprocessor', None)
    if preprocessor is not None:
        preprocessor = maybe_dotted(preprocessor)

    return dict(
        directories=directories,
        module_directory=module_directory,
        input_encoding=sget('input_encoding', 'utf-8'),
        error_handler=error_handler,
        default_filters=default_filters,
        imports=imports,
        future_imports=future_imports,
        filesystem_checks=reload_templates,
        strict_undefined=asbool(sget('strict_undefined', False)),
        preprocessor=preprocessor,
    )
def configure(cls, settings):
    """ Configure the access backend with app settings """
    def principals(key, default):
        # Each setting is a whitespace-separated principal list.
        return aslist(settings.get(key, default))

    return {
        'default_read': principals('pypi.default_read', ['authenticated']),
        'default_write': principals('pypi.default_write', []),
        'cache_update': principals('pypi.cache_update', ['authenticated']),
    }
def includeme(config):
    """Configure authentication routes and python-social-auth settings.

    Registers login/registration/password routes (both at the site root
    and under a /{discussion_slug} prefix), normalizes the SOCIAL_AUTH_*
    settings into lists, and includes the social-auth Pyramid app.
    """
    def contextual_route(name, route, from_root=True):
        # Register the route under the discussion slug, and optionally at
        # the site root as well.
        config.add_route('contextual_' + name, '/{discussion_slug}' + route)
        if from_root:
            config.add_route(name, route)

    contextual_route('login', '/login')
    contextual_route('login_forceproviders', '/login_showallproviders')
    contextual_route('logout', '/logout')
    # type in u(sername), id, email, {velruse-id-type}
    config.add_route('profile_user', '/user/{type}/{identifier}')
    config.add_route('avatar', '/user/{type}/{identifier}/avatar/{size:\d+}')
    contextual_route('register', '/register')
    contextual_route('user_confirm_email', '/users/email_confirm/{ticket}')
    # Do we want this?
    # config.add_route('profile_search', '/usernames/{user_name}')
    # TODO: secure next three methods to avoid spamming the user.
    contextual_route('confirm_emailid_sent',
                     '/confirm_email_sent_id/{email_account_id:\d+}')
    contextual_route('confirm_email_sent', '/confirm_email_sent/{email}')
    contextual_route('password_change_sent',
                     '/password_change_sent/{profile_id:\d+}')
    contextual_route('request_password_change', '/req_password_change')
    contextual_route('do_password_change', '/do_password_change/{ticket}')
    contextual_route('welcome', '/welcome/{ticket}')
    contextual_route('finish_password_change', '/finish_password_change')
    config.add_route('contextual_social_auth',
                     '/{discussion_slug}/login/{backend}')
    contextual_route('add_social_account', '/add_account/{backend}')

    # determine which providers we want to configure
    settings = config.get_settings()
    providers = aslist(settings['login_providers'])
    config.add_settings(login_providers=providers)
    config.add_settings(trusted_login_providers=aslist(
        settings.get('trusted_login_providers', '')))
    if not any(providers):
        sys.stderr.write('no login providers configured, double check '
                         'your ini file and add a few')

    # Normalize social-auth settings that PSA expects as lists.
    settings = config.registry.settings
    for name in ('SOCIAL_AUTH_AUTHENTICATION_BACKENDS',
                 'SOCIAL_AUTH_USER_FIELDS',
                 'SOCIAL_AUTH_PROTECTED_USER_FIELDS',
                 'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION'):
        settings[name] = aslist(settings.get(name, ''))
    # All *_SCOPE settings are also whitespace-separated lists.
    for k in settings.iterkeys():
        if k.endswith("_SCOPE") and k.startswith("SOCIAL_AUTH_"):
            settings[k] = aslist(settings.get(k, ''))
    config.add_request_method('assembl.auth.social_auth.get_user',
                              'user', reify=True)
    config.include('social.apps.pyramid_app')
    config.scan()
    config.scan('social.apps.pyramid_app')
def includeme(config):
    """Configure authentication routes and python-social-auth settings.

    Registers login/registration/password routes (both at the site root
    and under a /{discussion_slug} prefix), normalizes the SOCIAL_AUTH_*
    settings into lists, and includes the social-auth Pyramid app.
    """
    def contextual_route(name, route, from_root=True):
        # Register the route under the discussion slug, and optionally at
        # the site root as well.
        config.add_route('contextual_' + name, '/{discussion_slug}' + route)
        if from_root:
            config.add_route(name, route)

    contextual_route('login', '/login')
    contextual_route('login_forceproviders', '/login_showallproviders')
    contextual_route('logout', '/logout')
    # type in u(sername), id, email, {velruse-id-type}
    config.add_route('profile_user', '/user/{type}/{identifier}')
    config.add_route('avatar', '/user/{type}/{identifier}/avatar/{size:\d+}')
    contextual_route('register', '/register')
    contextual_route('user_confirm_email', '/users/email_confirm/{ticket}')
    # Do we want this?
    # config.add_route('profile_search', '/usernames/{user_name}')
    # TODO: secure next three methods to avoid spamming the user.
    contextual_route('confirm_emailid_sent',
                     '/confirm_email_sent_id/{email_account_id:\d+}')
    contextual_route('confirm_email_sent', '/confirm_email_sent/{email}')
    contextual_route('password_change_sent',
                     '/password_change_sent/{profile_id:\d+}')
    contextual_route('request_password_change', '/req_password_change')
    contextual_route('do_password_change', '/do_password_change/{ticket}')
    contextual_route('welcome', '/welcome/{ticket}')
    contextual_route('finish_password_change', '/finish_password_change')
    config.add_route('contextual_social_auth',
                     '/{discussion_slug}/login/{backend}')
    contextual_route('add_social_account', '/add_account/{backend}')

    # determine which providers we want to configure
    settings = config.get_settings()
    providers = aslist(settings['login_providers'])
    config.add_settings(login_providers=providers)
    config.add_settings(trusted_login_providers=aslist(
        settings.get('trusted_login_providers', '')))
    if not any(providers):
        sys.stderr.write('no login providers configured, double check '
                         'your ini file and add a few')

    # Normalize social-auth settings that PSA expects as lists.
    settings = config.registry.settings
    for name in ('SOCIAL_AUTH_AUTHENTICATION_BACKENDS',
                 'SOCIAL_AUTH_USER_FIELDS',
                 'SOCIAL_AUTH_PROTECTED_USER_FIELDS',
                 'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION'):
        settings[name] = aslist(settings.get(name, ''))
    # All *_SCOPE settings are also whitespace-separated lists.
    for k in settings.iterkeys():
        if k.endswith("_SCOPE") and k.startswith("SOCIAL_AUTH_"):
            settings[k] = aslist(settings.get(k, ''))
    config.add_request_method(
        'assembl.auth.social_auth.get_user', 'user', reify=True)
    config.include('social.apps.pyramid_app')
    config.scan()
    config.scan('social.apps.pyramid_app')
def load_from_config(config, prefix=''):
    """Build a changes Listener from prefixed application settings."""
    sget = config.get_settings().get
    return Listener(
        aslist(sget(prefix + 'collections', '')),
        sget(prefix + 'bucket', 'monitor'),
        sget(prefix + 'collection', 'changes'),
        aslist(sget(prefix + 'principals', Everyone)),
    )
def factory(handler, registry):
    """Tween factory: rewrite request.method from a request parameter."""
    get = registry.settings.get
    # Methods the rewrite applies to, and methods it may rewrite into.
    allowed_sources = [m.upper()
                       for m in aslist(get('methodrewrite.on', 'GET POST'))]
    allowed_targets = [m.upper()
                       for m in aslist(get('methodrewrite.to',
                                           ' '.join(HTTP_METHODS)))]
    param = get('methodrewrite.param', '_method')

    def methodrewrite_tween(request):
        if request.method.upper() in allowed_sources \
                and param in request.params:
            target = request.params.get(param, '').upper()
            if target and (not allowed_targets or target in allowed_targets):
                request.method = target
        return handler(request)

    return methodrewrite_tween
def includeme(config):
    # Copy python-social-auth (PSA) configuration out of the Pyramid
    # 'psa.*' settings into the SOCIAL_AUTH_* keys PSA reads, then push
    # everything back into the registry settings.
    SOCIAL_AUTH_SETTINGS['SOCIAL_AUTH_AUTHENTICATION_BACKENDS'] = aslist(
        config.registry.settings['psa.authentication_backends'])
    # Per-provider credentials, keyed the way python-social-auth expects.
    SOCIAL_AUTH_KEYS = {
        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY':
            config.registry.settings['psa.google.key'],
        'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET':
            config.registry.settings['psa.google.secret'],
        'SOCIAL_AUTH_TWITTER_KEY':
            config.registry.settings['psa.twitter.key'],
        'SOCIAL_AUTH_TWITTER_SECRET':
            config.registry.settings['psa.twitter.secret'],
        'SOCIAL_AUTH_FACEBOOK_APP_KEY':
            config.registry.settings['psa.facebook.app.key'],
        'SOCIAL_AUTH_FACEBOOK_APP_SECRET':
            config.registry.settings['psa.facebook.app.secret'],
        'SOCIAL_AUTH_FACEBOOK_APP_NAMESPACE':
            config.registry.settings['psa.facebook.app.namespace'],
        'SOCIAL_AUTH_YAHOO_OAUTH_KEY':
            config.registry.settings['psa.yahoo.key'],
        'SOCIAL_AUTH_YAHOO_OAUTH_SECRET':
            config.registry.settings['psa.yahoo.secret'],
        'SOCIAL_AUTH_LINKEDIN_KEY':
            config.registry.settings['psa.linkedin.key'],
        'SOCIAL_AUTH_LINKEDIN_SECRET':
            config.registry.settings['psa.linkedin.secret'],
        # Scope is a whitespace-separated list in the ini file.
        'SOCIAL_AUTH_LINKEDIN_SCOPE':
            aslist(config.registry.settings['psa.linkedin.scope']),
        'SOCIAL_AUTH_GITHUB_KEY':
            config.registry.settings['psa.github.key'],
        'SOCIAL_AUTH_GITHUB_SECRET':
            config.registry.settings['psa.github.secret'],
        'SOCIAL_AUTH_INSTAGRAM_KEY':
            config.registry.settings['psa.instagram.key'],
        'SOCIAL_AUTH_INSTAGRAM_SECRET':
            config.registry.settings['psa.instagram.secret'],
        'SOCIAL_AUTH_FLICKR_KEY':
            config.registry.settings['psa.flickr.key'],
        'SOCIAL_AUTH_FLICKR_SECRET':
            config.registry.settings['psa.flickr.secret'],
        'SOCIAL_AUTH_REDDIT_KEY':
            config.registry.settings['psa.reddit.key'],
        'SOCIAL_AUTH_REDDIT_SECRET':
            config.registry.settings['psa.reddit.secret'],
        'SOCIAL_AUTH_REDDIT_AUTH_EXTRA_ARGUMENTS': {},
        'SOCIAL_AUTH_STEAM_API_KEY':
            config.registry.settings['psa.steam.key'],
        'SOCIAL_AUTH_STEAM_EXTRA_DATA':
            aslist(config.registry.settings['psa.steam.extra'])
    }
    config.registry.settings.update(SOCIAL_AUTH_SETTINGS)
    config.registry.settings.update(SOCIAL_AUTH_KEYS)
def crate_init(config):
    """Bind SQLAlchemy and the raw crate connection to the configured hosts."""
    settings = config.get_settings()
    engine = create_engine(
        'crate://',
        connect_args={'servers': aslist(settings['crate.hosts'])},
        echo=asbool(settings.get('crate.echo', 'False')),
        pool_size=int(settings.get('sql.pool_size', 5)),
        max_overflow=int(settings.get('sql.max_overflow', 5)),
    )
    DB_SESSION.configure(bind=engine)
    Base.metadata.bind = engine
    # The raw (non-SQLAlchemy) connection uses the same host list.
    CRATE_CONNECTION.configure(aslist(settings['crate.hosts']))
def main(argv=sys.argv):
    """Configure Raspberry Pi GPIO pins from the ini file named in argv."""
    # late import so we can run on a non-raspi device
    from RPi import GPIO

    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)

    GPIO.setmode(GPIO.BCM)
    # Input pins first, then output pins, as listed in the settings.
    for direction, key in ((GPIO.IN, "brewcontrol.inpins"),
                           (GPIO.OUT, "brewcontrol.outpins")):
        for pin in map(int, aslist(settings[key])):
            GPIO.setup(pin, direction)
def configure(cls, settings):
    """ Configure the access backend with app settings """
    rounds = int(settings.get("auth.rounds", DEFAULT_ROUNDS))

    def principals(key, default):
        # Each setting is a whitespace-separated principal list.
        return aslist(settings.get(key, default))

    return {
        "default_read": principals("pypi.default_read", ["authenticated"]),
        "default_write": principals("pypi.default_write", []),
        "cache_update": principals("pypi.cache_update", ["authenticated"]),
        "pwd_context": get_pwd_context(rounds),
        "token_expiration": int(settings.get("auth.token_expire", ONE_WEEK)),
        "signing_key": settings.get("auth.signing_key"),
    }
def _get_credentials(self, request):
    """Extract and verify an FxA OAuth bearer token from the request.

    Returns the FxA user id, or None when the Authorization header is
    absent or not a valid bearer token. Raises HTTPServiceUnavailable
    on out-of-protocol server errors.
    """
    authorization = request.headers.get('Authorization', '')
    parts = authorization.split(' ', 1)
    if len(parts) != 2:
        return None
    authmeth, auth = parts
    if authmeth.lower() != 'bearer':
        return None

    # Use PyFxa defaults if not specified
    server_url = fxa_conf(request, 'oauth_uri')
    scope = aslist(fxa_conf(request, 'required_scope'))

    auth_client = OAuthClient(server_url=server_url,
                              cache=self._get_cache(request))
    try:
        profile = auth_client.verify_token(token=auth, scope=scope)
    except fxa_errors.OutOfProtocolError as e:
        logger.error(e)
        raise httpexceptions.HTTPServiceUnavailable()
    except (fxa_errors.InProtocolError, fxa_errors.TrustError) as e:
        logger.info(e)
        return None
    return profile['user']
def get_reader(settings, topic, channel, sentry_client=None):
    """ Get a :py:class:`gnsq.Reader` instance configured to connect to the nsqd reader addresses specified in settings. The reader will read from the specified topic and channel. The caller is responsible for adding appropriate `on_message` hooks and starting the reader. """
    topic = resolve_topic(topic, settings=settings)
    addrs = aslist(settings.get('nsq.reader.addresses', 'localhost:4150'))
    reader = gnsq.Reader(topic, channel, nsqd_tcp_addresses=addrs)

    if sentry_client is None:
        return reader

    # Shared context attached to every Sentry report from this reader.
    extra = {'topic': topic}

    def _capture_exception(message, error):
        if message is not None:
            extra['message'] = message.body
        sentry_client.captureException(exc_info=True, extra=extra)

    def _capture_error(error):
        sentry_client.captureException(exc_info=(type(error), error, None),
                                       extra=extra)

    def _capture_message(message):
        if message is not None:
            extra['message'] = message.body
        sentry_client.captureMessage(extra=extra)

    reader.on_exception.connect(_capture_exception, weak=False)
    reader.on_giving_up.connect(_capture_message, weak=False)
    reader.on_error.connect(_capture_error, weak=False)
    return reader
def load_from_config(config, prefix=''):
    """Build a pusher Listener from its event_listeners settings."""
    settings = config.get_settings()
    return Listener(
        settings['event_listeners.pusher.channel'],
        aslist(settings['event_listeners.pusher.resources']))
def get_login_context(request, force_show_providers=False):
    """Build the template context for the login page.

    Stores the discussion slug in the session, computes the available
    login providers (minus any configured as hidden), and decides whether
    registration should be offered.
    """
    slug = request.matchdict.get('discussion_slug', None)
    if slug:
        request.session['discussion'] = slug
    else:
        # Fixed: pop() without a default raised KeyError when no
        # discussion had been stored in the session yet.
        request.session.pop('discussion', None)
    discussion = discussion_from_request(request)
    get_routes = create_get_route(request, discussion)
    providers = get_provider_data(get_routes)
    # Hide registration unless anonymous users can read the discussion or
    # some role may (request to) self-register.
    hide_registration = (discussion and not public_roles.intersection(set(
        roles_with_permissions(discussion, P_READ))) and
        not roles_with_permissions(
            discussion, P_SELF_REGISTER_REQUEST, P_SELF_REGISTER))
    if not force_show_providers:
        hide_providers = aslist(request.registry.settings.get(
            'hide_login_providers', ()))
        if isinstance(hide_providers, (str, unicode)):
            hide_providers = (hide_providers,)
        providers = [p for p in providers if p['type'] not in hide_providers]
    return dict(
        get_default_context(request),
        providers=providers,
        providers_json=json.dumps(providers),
        saml_providers=request.registry.settings.get(
            'SOCIAL_AUTH_SAML_ENABLED_IDPS', {}),
        hide_registration=hide_registration,
        identifier=request.params.get('identifier', ''),
        google_consumer_key=request.registry.settings.get(
            'google.consumer_key', ''),
        next=handle_next_view(request),
        get_route=get_routes)
def get_providers_with_names(providers=None):
    """Map provider type to display name for the given (or configured) providers."""
    from ..models.auth import IdentityProvider
    if providers is None:
        providers = aslist(config.get('login_providers'))
    names_by_type = dict(IdentityProvider.default_db.query(
        IdentityProvider.provider_type, IdentityProvider.name).all())
    return {provider: names_by_type[provider] for provider in providers}
def reapply_cors(request, response):
    """Reapply cors headers to the new response with regards to the request.

    We need to re-apply the CORS checks done by Cornice, in case we're
    recreating the response from scratch.
    """
    service = request.current_service
    if service:
        # Force Cornice to redo its CORS post-processing for this service.
        request.info["cors_checked"] = False
        cors.apply_cors_post_request(service, request, response)
        response = cors.ensure_origin(service, request, response)
    else:
        # No existing service is concerned, and Cornice is not implied.
        origin = request.headers.get("Origin")
        if origin:
            permitted = set(aslist(request.registry.settings["cors_origins"]))
            # Echo the origin back only when it (or the wildcard) is allowed.
            if permitted & {"*", origin}:
                response.headers["Access-Control-Allow-Origin"] = origin
    # Import service here because kinto.core import utils
    from kinto.core import Service
    if Service.default_cors_headers:  # pragma: no branch
        response.headers["Access-Control-Expose-Headers"] = ",".join(
            Service.default_cors_headers)
    return response
def configure_error_views(self):
    """Register error views, skipping those that conflict with debug tooling."""
    cfg = self.settings
    scan = self.config.scan
    # Forbidden view overrides helpful auth debug error messages,
    # so pull it in only when authorization debugging is off.
    if not asbool(cfg["pyramid.debug_authorization"]):
        from websauna.system.core.views import forbidden
        scan(forbidden)
    if not asbool(cfg["pyramid.debug_notfound"]):
        from websauna.system.core.views import notfound
        scan(notfound)
    # The internal server error view clashes with pyramid_debugtoolbar, so
    # activate it only when the toolbar is not included (production mode).
    if "pyramid_debugtoolbar" not in aslist(cfg.get("pyramid.includes", [])):
        from websauna.system.core.views import internalservererror
        scan(internalservererror)
    if cfg.get("websauna.error_test_trigger", False):
        from websauna.system.core.views import errortrigger
        scan(errortrigger)
        self.config.add_route('error_trigger', '/error-trigger')
def upload_file(form_field, path='', resize_image=False,
                images_thumbnails_path='', size=None):
    """Persist an uploaded form file under a random URL-safe name.

    :param form_field: form field whose ``.data`` carries ``filename``/``file``
    :param path: destination directory (falls back to configured image_path)
    :param resize_image: also produce a thumbnail via ``resize`` when True
    :param images_thumbnails_path: thumbnail directory (config fallback)
    :param size: (width, height) pair for the thumbnail (config fallback)
    :return: the generated filename, or ``None`` when no file was uploaded
    """
    if not path:
        path = get_param_from_config('image_path')
    if not hasattr(form_field.data, 'filename'):
        # Nothing was uploaded in this field.
        return None
    input_file = form_field.data.file
    input_file.seek(0)
    # Random, URL-safe basename; keep the original file extension.
    filename = token_urlsafe(12) + os.path.splitext(
        os.path.basename(form_field.data.filename))[1]
    file_path = os.path.join(path, filename)
    # BUG FIX: the original wrote to the final path and then called
    # os.rename() with identical source and destination — a pointless
    # self-rename. Write directly to the destination instead.
    with open(file_path, 'wb') as output_file:
        shutil.copyfileobj(input_file, output_file)
    if resize_image:
        if not images_thumbnails_path:
            images_thumbnails_path = get_param_from_config('thumbnails_path')
        if not size:
            size = aslist(get_param_from_config('thumbnails_size'))
        width, height = size
        resize(filename, path, images_thumbnails_path,
               (int(width), int(height)))
    return filename
def register(self):
    """Render the sign-up form on GET, process a submission on POST."""
    social_logins = aslist(self.settings.get("websauna.social_logins", ""))
    method = self.request.method
    if method == 'GET':
        # Already signed-in users skip registration entirely.
        if self.request.user:
            return HTTPFound(location=self.after_register_url)
        return {'form': self.form.render(), 'social_logins': social_logins}
    if method != 'POST':
        # Anything that is neither GET nor POST gets no response body.
        return
    try:
        captured = self.form.validate(self.request.POST.items())
    except deform.ValidationFailure as e:
        return {'form': e.render(), 'errors': e.error.children,
                'social_logins': social_logins}
    # With the form validated, we know email and username are unique.
    # The CSRF token is transport detail, not user data.
    del captured['csrf_token']
    return get_registration_service(self.request).sign_up(user_data=captured)
def add_acl_from_settings(config):
    """ Load ACL data from settings

    Notes
    -----
    The settings should be in the form::

        steward.perm.<permission> = <list of groups>

    For example::

        steward.perm.schedule_write = developer manager

    This will give any users in the ``developer`` or ``manager`` group
    access to endpoints with the 'schedule_write' permission.

    """
    prefix = 'steward.perm.'
    settings = config.get_settings()
    # items() instead of the py2-only iteritems(); works on both versions.
    for key, value in settings.items():
        if not key.startswith(prefix):
            continue
        # BUG FIX: split('.')[2] silently truncated permission names that
        # themselves contain a dot; slice off the prefix instead.
        permission = key[len(prefix):]
        for principal in aslist(value):
            # Map the magic names onto pyramid's built-in principals.
            if principal.lower() == 'authenticated':
                principal = Authenticated
            elif principal.lower() == 'everyone':
                principal = Everyone
            # Prepend so settings-driven ACEs take precedence.
            Root.__acl__.insert(0, (Allow, principal, permission))
def includeme(config):
    """Wire SQLAlchemy engines and session factories into pyramid_services.

    :type config: pyramid.config.Configurator
    """
    settings = config.get_settings()
    settings['tm.manager_hook'] = 'pyramid_tm.explicit_manager'
    for pkg in ('pyramid_services', 'pyramid_tm'):
        config.include(pkg)
    prefix = settings.get('pyramid_services_sqlalchemy.prefix', 'sqlalchemy.')
    names = aslist(settings.get(prefix + 'names', ''))
    if not names:
        # Single, unnamed engine configured directly under the prefix.
        engine = engine_from_config(settings, prefix=prefix)
        factory = sessionmaker(bind=engine)
        config.register_service(engine, IDBEngine)
        config.register_service(factory, IDBSessionFactory)
        config.register_service_factory(DBSessionFactory(factory), IDBSession)
        config.register_service(('', ), IDBNames)
    else:
        # One engine per declared name, each registered under that name.
        for name in names:
            engine = engine_from_config(settings, prefix=prefix + name + '.')
            factory = sessionmaker(bind=engine)
            config.register_service(engine, IDBEngine, name=name)
            config.register_service(factory, IDBSessionFactory, name=name)
            config.register_service_factory(
                DBSessionFactory(factory, name), IDBSession, name=name)
        config.register_service(tuple(names), IDBNames)
    config.action(None, configure_mappers)
def register(self):
    """Serve the registration form (GET) or create the account (POST)."""
    social_logins = aslist(self.settings.get("websauna.social_logins", ""))
    if self.request.method == 'GET':
        # Logged-in users are redirected; everyone else gets the form.
        return (HTTPFound(location=self.after_register_url)
                if self.request.user
                else {'form': self.form.render(),
                      'social_logins': social_logins})
    elif self.request.method != 'POST':
        return
    # If the request is a POST:
    try:
        captured = self.form.validate(self.request.POST.items())
    except deform.ValidationFailure as e:
        return {'form': e.render(),
                'errors': e.error.children,
                'social_logins': social_logins}
    # With the form validated, we know email and username are unique.
    captured.pop('csrf_token')
    registration_service = get_registration_service(self.request)
    return registration_service.sign_up(user_data=captured)
def auto_create_resources(event, resources):
    """Create the source bucket (and collection) of each configured resource.

    For every entry in ``resources``, create its source bucket and — when a
    specific collection is configured — that collection, granting "write" to
    the principals listed in the
    ``signer.auto_create_resources_principals`` setting.
    """
    storage = event.app.registry.storage
    permission = event.app.registry.permission
    # Principals granted "write" on every auto-created object.
    write_principals = aslist(
        event.app.registry.
        settings["signer.auto_create_resources_principals"])
    for resource in resources.values():
        perms = {"write": write_principals}
        bucket = resource["source"]["bucket"]
        collection = resource["source"]["collection"]
        bucket_uri = f"/buckets/{bucket}"
        storage_create_raw(
            storage_backend=storage,
            permission_backend=permission,
            resource_name="bucket",
            parent_id="",
            object_uri=bucket_uri,
            object_id=bucket,
            permissions=perms,
        )
        # If resource is configured for specific collection, create it too.
        if collection:
            collection_uri = f"{bucket_uri}/collections/{collection}"
            storage_create_raw(
                storage_backend=storage,
                permission_backend=permission,
                resource_name="collection",
                parent_id=bucket_uri,
                object_uri=collection_uri,
                object_id=collection,
                permissions=perms,
            )
def reapply_cors(request, response):
    """Reapply cors headers to the new response with regards to the request.

    We need to re-apply the CORS checks done by Cornice, in case we're
    recreating the response from scratch.
    """
    service = request.current_service
    if service:
        # Mark CORS as unchecked so Cornice re-runs its post-request hook.
        request.info['cors_checked'] = False
        cors.apply_cors_post_request(service, request, response)
        response = cors.ensure_origin(service, request, response)
    else:
        # No existing service is concerned, and Cornice is not implied.
        origin = request.headers.get('Origin')
        if origin:
            settings = request.registry.settings
            allowed_origins = set(aslist(settings['cors_origins']))
            required_origins = {'*', decode_header(origin)}
            # Echo the origin back only when it (or the wildcard) is allowed.
            if allowed_origins.intersection(required_origins):
                origin = encode_header(origin)
                response.headers['Access-Control-Allow-Origin'] = origin
    # Import service here because kinto.core import utils
    from kinto.core import Service
    if Service.default_cors_headers:
        headers = ','.join(Service.default_cors_headers)
        response.headers['Access-Control-Expose-Headers'] = headers
    return response
def __init__(self, request):
    """Capture the permission context for the current request.

    Resolves the backend shortcuts, and — when the request targets a
    resource service — the resource name, the object id and required
    permission, and any principals explicitly allowed via settings.
    """
    # Make it available for the authorization policy.
    self.get_prefixed_userid = functools.partial(prefixed_userid, request)
    # Store some shortcuts.
    permission = request.registry.permission
    self.check_permission = permission.check_permission
    self._get_accessible_objects = permission.get_accessible_objects
    # Store current resource and required permission.
    service = utils.current_service(request)
    is_on_resource = (service is not None
                      and hasattr(service, 'viewset')
                      and hasattr(service, 'resource'))
    if is_on_resource:
        self.resource_name = request.current_resource_name
        self.on_collection = getattr(service, "type", None) == "collection"
        self.permission_object_id, self.required_permission = (
            self._find_required_permission(request, service))
        # To obtain shared records on a collection endpoint, use a match:
        self._object_id_match = self.get_permission_object_id(request, '*')
        # Check if principals are allowed explicitly from settings.
        # e.g. "record_create_principals = system.Authenticated"
        settings = request.registry.settings
        setting = '%s_%s_principals' % (self.resource_name,
                                        self.required_permission)
        self.allowed_principals = aslist(settings.get(setting, ''))
def _traced_init(wrapped, instance, args, kwargs):
    """Wrapper around the Pyramid ``Configurator`` constructor that prepends
    the OpenTelemetry tween and fixes the caller package, then includes the
    instrumentation callbacks.
    """
    settings = kwargs.get("settings", {})
    tweens = aslist(settings.get("pyramid.tweens", []))
    # BUG FIX: the original guard was ``TWEEN_NAME not in settings``, which
    # tests the *settings keys* (e.g. "pyramid.tweens"), not the tween list,
    # so the duplicate check never triggered and the tween could be
    # prepended twice. Test membership in the tween list itself.
    if tweens and TWEEN_NAME not in tweens:
        # pyramid.tweens.EXCVIEW is the name of built-in exception view provided by
        # pyramid. We need our tween to be before it, otherwise unhandled
        # exceptions will be caught before they reach our tween.
        tweens = [TWEEN_NAME] + tweens
        settings["pyramid.tweens"] = "\n".join(tweens)
    kwargs["settings"] = settings
    # `caller_package` works by walking a fixed amount of frames up the stack
    # to find the calling package. So if we let the original `__init__`
    # function call it, our wrapper will mess things up.
    if not kwargs.get("package", None):
        # Get the package for the third frame up from this one.
        # Default is `level=2` which will give us the package from `wrapt`
        # instead of the desired package (the caller)
        kwargs["package"] = caller_package(level=3)
    wrapped(*args, **kwargs)
    instance.include("opentelemetry.instrumentation.pyramid.callbacks")
def _make_lookup(self, clear_default_filters=False):
    """Build the Mako ``SuperTemplateLookup`` for this theme's settings."""
    settings = self.settings
    if clear_default_filters:
        filters = ['decode.utf8']
    else:
        filters = ['escape']
    imports = [
        'from webhelpers2.html import escape',
    ]
    imports += aslist(
        settings.get('pyramid_frontend.template_imports', ''),
        flatten=False)
    base_dir = settings.get('pyramid_frontend.module_directory')
    return SuperTemplateLookup(
        directories=self.template_dirs,
        input_encoding='utf-8',
        output_encoding='utf-8',
        imports=imports,
        default_filters=filters,
        filesystem_checks=asbool(settings.get('pyramid_frontend.debug')),
        # base_dir may be None/empty; the `and` keeps it falsy in that case.
        module_directory=base_dir and os.path.join(base_dir, self.key),
        cache_impl=self.cache_impl,
        cache_args=self.cache_args)
def includeme(config):
    """ Configures the Celery connection from the pyramid side of the application.

    Pyramid must know how to talk to the celery process in order to send
    asynchronous jobs.

    This method will not actually start the celery process.

    :param config: Pyramid configuration object
    """
    settings = config.registry.settings
    assert 'celery.blame' in settings, 'Must specify an blame user'
    celery_config = {
        'BROKER_URL': settings['celery.broker.url'],
        'CELERY_RESULT_BACKEND': settings['celery.backend.url'],
        'BROKER_TRANSPORT_OPTIONS': {
            'fanout_prefix': True,
            'fanout_patterns': True,
        },
        'CELERY_INCLUDE': aslist(settings.get('celery.include', [])),
        'CELERYBEAT_SCHEDULE': _get_schedule(settings),
    }
    app.conf.update(**celery_config)
    # Keep the pyramid settings reachable from celery tasks.
    app.settings = settings
def pserve_file_config(self, filename, global_conf=None):
    """Read the ``[pserve]`` section of *filename* and register its
    ``watch_files`` entries on ``self.watch_files`` as absolute paths.

    :param filename: path to the ini file to read
    :param global_conf: optional dict of extra ConfigParser defaults
    """
    here = os.path.abspath(os.path.dirname(filename))
    defaults = {}
    if global_conf:
        defaults.update(global_conf)
    # %(here)s in the ini file expands to the ini file's directory.
    defaults['here'] = here
    config = self.ConfigParser(defaults=defaults)
    # Preserve option-name case (default optionxform lower-cases keys).
    config.optionxform = str
    config.read(filename)
    try:
        items = dict(config.items('pserve'))
    except configparser.NoSectionError:
        # No [pserve] section: nothing to watch.
        return
    watch_files = aslist(items.get('watch_files', ''), flatten=False)

    # track file paths relative to the ini file
    resolver = AssetResolver(package=None)

    for file in watch_files:
        if ':' in file:
            # "package:path" asset spec — resolve through pyramid.
            file = resolver.resolve(file).abspath()
        elif not os.path.isabs(file):
            file = os.path.join(here, file)
        self.watch_files.append(os.path.abspath(file))
def configure_error_views(self): settings = self.settings # Forbidden view overrides helpful auth debug error messages, # so pull in only when really needed if not asbool(settings["pyramid.debug_authorization"]): from websauna.system.core.views import forbidden self.config.scan(forbidden) if not asbool(settings["pyramid.debug_notfound"]): from websauna.system.core.views import notfound self.config.scan(notfound) # Internal server error page must be only activated in the production mode, as it clashes with pyramid_debugtoolbar, as both handle uncaught exceptions has_debug_toolbar = "pyramid_debugtoolbar" in aslist( settings.get("pyramid.includes", [])) debug_toolbar_enabled = has_debug_toolbar and asbool( settings.get("debugtoolbar.enabled", True)) if not debug_toolbar_enabled: from websauna.system.core.views import internalservererror self.config.scan(internalservererror) if settings.get("websauna.error_test_trigger", False): from websauna.system.core.views import errortrigger self.config.scan(errortrigger) self.config.add_route('error_trigger', '/error-trigger') from websauna.system.core.views import badcsrftoken self.config.scan(badcsrftoken)
def _formatter_callback_factory():  # pragma: no cover
    """Returns a list of includes to be given to `cnxepub.collation.collate`.

    Builds one exercise callback per configured ``embeddables.exercise.match``
    entry (each entry is "match,field"), optionally backed by a memcache
    client.
    """
    includes = []

    exercise_url_template = '{baseUrl}/api/exercises?q={field}:"{{itemCode}}"'
    settings = get_current_registry().settings
    exercise_base_url = settings.get('embeddables.exercise.base_url', None)
    # Each configured match is "match,field"; split on the first comma only.
    exercise_matches = [match.split(',', 1) for match in aslist(
        settings.get('embeddables.exercise.match', ''), flatten=False)]
    exercise_token = settings.get('embeddables.exercise.token', None)
    mathml_url = settings.get('mathmlcloud.url', None)
    # memcache_servers is a whitespace-separated list of host:port entries.
    memcache_servers = settings.get('memcache_servers')
    if memcache_servers:
        memcache_servers = memcache_servers.split()
    else:
        memcache_servers = None

    if exercise_base_url and exercise_matches:
        mc_client = None
        if memcache_servers:
            mc_client = memcache.Client(memcache_servers, debug=0)
        for (exercise_match, exercise_field) in exercise_matches:
            template = exercise_url_template.format(
                baseUrl=exercise_base_url, field=exercise_field)
            includes.append(exercise_callback_factory(exercise_match,
                                                      template,
                                                      mc_client,
                                                      exercise_token,
                                                      mathml_url))
    return includes
def reapply_cors(request, response):
    """Reapply cors headers to the new response with regards to the request.

    We need to re-apply the CORS checks done by Cornice, in case we're
    recreating the response from scratch.
    """
    service = current_service(request)
    if service:
        # Hand the response back through Cornice's CORS machinery.
        request.info['cors_checked'] = False
        cors.apply_cors_post_request(service, request, response)
        response = cors.ensure_origin(service, request, response)
    else:
        # No existing service is concerned, and Cornice is not implied.
        origin = request.headers.get('Origin')
        if origin:
            permitted = set(aslist(request.registry.settings['cors_origins']))
            # Echo the origin back only when it (or the wildcard) is allowed.
            if permitted & {'*', decode_header(origin)}:
                response.headers['Access-Control-Allow-Origin'] = (
                    encode_header(origin))
    # Import service here because cliquet import utils
    from cliquet import Service
    if Service.default_cors_headers:
        response.headers['Access-Control-Expose-Headers'] = ','.join(
            Service.default_cors_headers)
    return response
def from_settings(cls, settings, prefix='mail.'):
    """Create a new instance of 'Mailer' from settings dict.

    :param settings: a settings dict-like
    :param prefix: prefix separating 'pyramid_mailer' settings
    """
    settings = settings or {}
    option_names = ('host', 'port', 'username', 'password', 'tls', 'ssl',
                    'keyfile', 'certfile', 'queue_path', 'debug',
                    'default_sender', 'sendmail_app', 'sendmail_template')
    wanted = set(prefix + name for name in option_names)
    strip = len(prefix)
    kwargs = {}
    for key in settings.keys():
        if key in wanted:
            kwargs[key[strip:]] = settings[key]
    # Coerce flags to booleans; falsy values (None, '') are left as-is.
    for key in ('tls', 'ssl'):
        value = kwargs.get(key)
        if value:
            kwargs[key] = asbool(value)
    # Coerce numeric options; falsy values are left as-is.
    for key in ('debug', 'port'):
        value = kwargs.get(key)
        if value:
            kwargs[key] = int(value)
    # list values
    for key in ('sendmail_template', ):
        if key in kwargs:
            kwargs[key] = aslist(kwargs.get(key))
    return cls(**kwargs)