def configure_task(task_name):
    """Resolve and configure the task class named in the ``_config`` registry.

    Reads the ``kombu.`` and ``task.`` prefixed settings for *task_name*,
    resolves the ``class`` dotted name, normalizes the queue/exchange/
    schedule/rate options and calls ``cls.configure(**options)``.

    :param task_name: key into the module-level ``_config`` mapping
    :return: the configured task class
    """
    global _config
    config = _config[task_name]
    tc = util.prefixed_keys(config, 'kombu.')
    tc.update(util.prefixed_keys(config, 'task.'))
    resolver = DottedNameResolver()
    cls = resolver.resolve(tc['class'])
    if 'queues' in tc:
        tc['queues'] = dict(
            (k, get_queue(q))
            for k, q in util.as_dict(tc['queues']).items())
    else:
        # BUG FIX: this branch previously set tc['exchanges'] = {} (copy-paste
        # from the exchanges block below), leaving 'queues' undefined when the
        # setting is absent.
        tc['queues'] = {}
    if 'exchanges' in tc:
        tc['exchanges'] = dict(
            (e, get_exchange(e)) for e in util.as_list(tc['exchanges']))
    else:
        tc['exchanges'] = {}
    if 'schedule' in tc:
        tc['schedule'] = _parse_schedule(util.as_dict(tc['schedule']))
    if 'rate' in tc:
        # Rate limiting is expressed as a token-bucket rate/capacity pair.
        tc['rate'] = {
            'rate': float(tc['rate'].strip()),
            'capacity': float(tc['capacity'])
        }
    cls.configure(**tc)
    return cls
def create_dummy_extract():
    """Assemble an ExtractRecord populated with fixed placeholder values."""
    now = datetime.datetime.now()
    estate = RealEstateRecord(
        u'test', u'BL', u'Laufen', 2770, 1000, MultiPolygon(),
        ViewServiceRecord('test_link', 1, 1.0, {'de': 'test_legend'}))
    authority = OfficeRecord({u'en': u'PLR Authority'})
    resolver = DottedNameResolver()
    # Resolve the configured callables for the update date and the provider.
    date_method = resolver.resolve(
        Config.get('extract').get('base_data').get('methods').get('date'))
    av_update_date = date_method(estate)
    base_data = Config.get_base_data(av_update_date)
    provider_method = resolver.resolve(
        Config.get('extract').get('base_data').get('methods').get('provider'))
    cadaster_state = now
    theme = ThemeRecord(u'TEST', {u'de': u'TEST TEXT'})
    datasources = [DatasourceRecord(theme, now, authority)]
    plr_cadastre_authority = Config.get_plr_cadastre_authority()
    embeddable = EmbeddableRecord(
        cadaster_state,
        plr_cadastre_authority,
        provider_method(estate),
        av_update_date,
        datasources)
    return ExtractRecord(
        estate,
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        authority,
        base_data,
        embeddable)
def includeme(config):
    """ Let extdirect be included by config.include(). """
    settings = config.registry.settings
    extdirect_config = {}
    option_names = ("api_path", "router_path", "namespace", "descriptor",
                    "expose_exceptions", "debug_mode", "json_encoder")
    for option in option_names:
        value = settings.get("pyramid_extdirect.{}".format(option), None)
        if option in ("expose_exceptions", "debug_mode"):
            # Boolean settings arrive as strings from the INI file.
            value = (value == "true")
        elif option == "json_encoder" and value:
            # The encoder is given as a dotted name; resolve it to an object.
            from pyramid.path import DottedNameResolver
            value = DottedNameResolver().resolve(value)
        if value is not None:
            extdirect_config[option] = value
    extd = Extdirect(**extdirect_config)
    config.registry.registerUtility(extd, IExtdirect)
    # Wire up the API descriptor and router endpoints with their permissions.
    config.add_route('extapi', extd.api_path)
    config.add_view(api_view, route_name='extapi',
                    permission=settings.get("pyramid_extdirect.api_view_permission"))
    config.add_route('extrouter', extd.router_path)
    config.add_view(router_view, route_name='extrouter',
                    permission=settings.get("pyramid_extdirect.router_view_permission"))
def get_init(global_config, settings, init_cls=None) -> Initializer:
    """Create the Initializer instance for a WSGI-like app.

    TODO: Deprecated. Use Pyramid's ``bootstrap()`` instead.

    The initializer is taken from *init_cls* when given, otherwise read
    from the ``websauna.init`` setting; either way it is passed through
    the dotted-name resolver before being instantiated.

    Example 1::

        config_uri = argv[1]
        init = get_init(dict(__file__=config_uri), settings)

    :param global_config: Global config dictionary, having __file__ entry as given by Paster
    :param settings: Settings dictionary
    :param init_cls: Explicitly give the Initializer class to use, otherwise read ``websauna.init`` settings.
    """
    assert "websauna.init" in settings, "You must have websauna.init setting pointing to your Initializer class"
    assert "__file__" in global_config

    if not init_cls:
        init_cls = settings.get("websauna.init")
    if not init_cls:
        raise RuntimeError("INI file lacks websauna.init option")

    # Resolve a dotted name (explicit classes pass through resolve as well).
    init_cls = DottedNameResolver().resolve(init_cls)
    return init_cls(global_config, settings)
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Example:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>
        {{ debug() }}

    The callable comes from the :ref:`websauna.template_debugger` setting,
    which is enabled on :ref:`development.ini` only. If no debugger is
    configured, nothing happens.
    """
    def _noop():
        return ""

    dotted = settings.get("websauna.template_debugger")
    if not dotted:
        return _noop()

    debugger = DottedNameResolver().resolve(dotted)
    assert debugger, "Could not find debugger in websauna.template_debugger setting: {}".format(dotted)

    def _invoke():
        debugger()
        return ""

    return _invoke
def configure_mailer(self, settings):
    """Configure outgoing email backend based on the INI settings."""
    settings = settings.copy()

    # Blank credential strings are not handled gracefully downstream,
    # so normalize them to None before handing settings to the mailer.
    for credential in ("mail.username", "mail.password"):
        if settings.get(credential, "x") == "":
            settings[credential] = None

    mailer_class = settings.get("websauna.mailer", "")
    if mailer_class in ("mail", ""):
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default backend
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
    else:
        # debug backend given as a dotted name
        mailer = DottedNameResolver().resolve(mailer_class)()
    self.config.registry.registerUtility(mailer, IMailer)
def mailer_settings_factory(settings, prefix='photoapp.mailer.'):
    """Build the configured IMailer implementation from settings.

    The implementation is chosen by the ``<prefix>class`` setting
    (a dotted name, e.g. smtp vs. console backends); it defaults to
    ``photoapp.mail.SMTP_Mailer``.

    Args:
        settings: dict of settings
        prefix: specific prefix to find mailer-specific settings

    Returns:
        IMailer instance
    """
    dotted = settings.get(prefix + 'class', 'photoapp.mail.SMTP_Mailer')
    mailer_cls = DottedNameResolver().resolve(dotted)
    return mailer_cls.from_settings(settings, prefix)
def main(argv=sys.argv, processor=None):
    """Yield due transactions and process them with the configured processor."""
    logger = logging.getLogger(__name__)
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = setup_database({}, **get_appsettings(config_uri))
    session = settings['session']

    subscription_model = SubscriptionModel(session)
    tx_model = TransactionModel(session)
    maximum_retry = int(settings.get(
        'billy.transaction.maximum_retry',
        TransactionModel.DEFAULT_MAXIMUM_RETRY,
    ))

    if processor is None:
        # Resolve the processor factory from its dotted name.
        factory = DottedNameResolver().maybe_resolve(
            settings['billy.processor_factory'])
        processor = factory()

    # yield all transactions and commit before we process them, so that
    # we won't double process them.
    with db_transaction.manager:
        logger.info('Yielding transaction ...')
        subscription_model.yield_transactions()

    with db_transaction.manager:
        logger.info('Processing transaction ...')
        tx_model.process_transactions(processor, maximum_retry=maximum_retry)

    logger.info('Done')
def test_app_no_login_real_policy(request, test_app_no_perm):
    """A configured Assembl fixture with permissions and no user logged in"""
    config = testing.setUp(
        registry=test_app_no_perm.app.registry,
        settings=get_config(),
    )
    from ...auth.util import authentication_callback
    from pyramid.authorization import ACLAuthorizationPolicy
    from pyramid.path import DottedNameResolver
    # Resolve the session-upgrading authentication policy by dotted name.
    policy_cls = DottedNameResolver(__package__).resolve(
        "assembl.auth.util.UpgradingSessionAuthenticationPolicy")
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.set_authentication_policy(
        policy_cls(callback=authentication_callback))
    import transaction
    # ensure default roles and permissions at startup
    from ...models import get_session_maker
    with transaction.manager:
        session = get_session_maker()
        from ...lib.migration import bootstrap_db_data
        bootstrap_db_data(session, False)
    return test_app_no_perm
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Example:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>
        {{ debug() }}

    The callable comes from the :ref:`websauna.template_debugger` setting
    and is active only on :ref:`development.ini`. If there is no debugger
    configured, a warning is given.
    """
    def _warn_unconfigured():
        logger.warn(
            "{{ debug() }} invoked, but websauna.template_debugger not set")
        return ""

    dotted = settings.get("websauna.template_debugger")
    if dotted:
        debugger = DottedNameResolver().resolve(dotted)
    else:
        debugger = _warn_unconfigured

    def _trigger():
        debugger()
        return ""

    return _trigger
def start_scheduler(config):
    """Create, start and register the papaye scheduler.

    Uses ``DummyScheduler`` when caching is off or no ``papaye.scheduler``
    dotted name is configured; otherwise resolves the configured class and
    instantiates it with every ``papaye.scheduler.*`` setting as a kwarg.
    Also exposes the scheduler as the reified ``request.scheduler`` property.
    """
    reader = config.settings_reader()
    settings = reader.settings
    scheduler = reader.read_str('papaye.scheduler')
    prefix = 'papaye.scheduler.'
    if not reader.read_bool('papaye.cache') or scheduler is None:
        scheduler = DummyScheduler()
    else:
        resolver = DottedNameResolver()
        # BUG FIX: the old code tested full setting keys against a one-shot
        # generator of already-stripped suffixes, so no kwargs were ever
        # collected. Filter on the prefix directly instead.
        scheduler_kwargs = {
            key[len(prefix):]: val
            for key, val in settings.items()
            if key.startswith(prefix)
        }
        scheduler = resolver.maybe_resolve(scheduler)(**scheduler_kwargs)
    scheduler.start()
    TaskRegistry().register_scheduler(scheduler)

    def get_scheduler(request):
        return scheduler

    config.add_request_method(
        get_scheduler, 'scheduler', property=True, reify=True
    )
def get_db_session(request, settings=None):
    """Return the DB session configured by ``bowab.db_session``.

    The setting may be a dotted name or an actual session object; it
    defaults to the module-level ``DBSession``.
    """
    if settings is None:
        settings = request.registry.settings
    configured = settings.get('bowab.db_session', DBSession)
    return DottedNameResolver().maybe_resolve(configured)
def site_factory(request):
    """Application site factory

    On application startup, this factory checks configuration to get
    application name and load it from the ZODB; if the application can't be
    found, configuration is scanned to get application factory, create
    a new one and create a local site manager.
    """
    conn = get_connection(request)
    root = conn.root()
    # Settings give the ZODB root key under which the application lives.
    application_key = request.registry.settings.get(PYAMS_APPLICATION_SETTINGS_KEY,
                                                    PYAMS_APPLICATION_DEFAULT_NAME)
    application = root.get(application_key)
    if application is None:
        # No stored application: pick a factory from settings (dotted name)
        # or fall back to the registered ISiteRootFactory utility.
        factory = request.registry.settings.get(PYAMS_APPLICATION_FACTORY_KEY)
        if factory:
            resolver = DottedNameResolver()
            factory = resolver.maybe_resolve(factory)
        else:
            factory = request.registry.queryUtility(ISiteRootFactory,
                                                    default=BaseSiteRoot)
        application = root[application_key] = factory()
        if IPossibleSite.providedBy(application):
            # Give the new application its own local site manager.
            lsm = LocalSiteManager(application, default_folder=False)
            application.setSiteManager(lsm)
        try:
            # if some components require a valid and complete registry
            # with all registered utilities, they can subscribe to
            # INewLocalSiteCreatedEvent event interface
            set_local_registry(application.getSiteManager())
            get_current_registry().notify(NewLocalSiteCreatedEvent(application))
        finally:
            # Always restore the previous (no) local registry.
            set_local_registry(None)
        import transaction  # pylint: disable=import-outside-toplevel
        # Persist the newly created application immediately.
        transaction.commit()
    return application
def run(self): """Run the initialization and prepare Pyramid subsystems. This is the main entry for ramping up a Websauna application. We go through various subsystem inits. """ # TODO: Remove passing settings to methods as an argument settings = self.settings self.secrets = self.read_secrets() self.configure_logging(settings) # Serving self.configure_templates() self.configure_static() # Forms self.configure_forms() self.configure_crud(settings) # Email self.configure_mailer(settings) # Timed tasks self.configure_scheduler(settings) self.configure_tasks(settings) # Core view and layout related self.configure_root() self.configure_error_views() self.configure_views() self.configure_panels(settings) self.configure_sitemap(settings) # Website administration self.configure_admin(settings) # Sessions and users self.configure_sessions(settings, self.secrets) self.configure_user(settings, self.secrets) self.configure_model_admins() self.configure_notebook() # Configure addons before anything else, so we can override bits from addon, like template lookup paths, later easily self.configure_addons() # Database # This must be run before configure_database() because SQLAlchemy will resolve @declared_attr and we must have config present by then self.configure_instrumented_models() self.configure_database() # Tests can pass us some extra initialization work on ad hoc extra_init = self.global_config.get("extra_init") if extra_init: resolver = DottedNameResolver() extra_init = resolver.resolve(extra_init) extra_init(self)
def includeme(config): """Bind to the db engine specifed in ``config.registry.settings``. Setup:: >>> from mock import Mock >>> import pyramid_basemodel >>> _engine_from_config = pyramid_basemodel.engine_from_config >>> _bind_engine = pyramid_basemodel.bind_engine >>> pyramid_basemodel.engine_from_config = Mock() >>> pyramid_basemodel.engine_from_config.return_value = 'engine' >>> pyramid_basemodel.bind_engine = Mock() >>> mock_config = Mock() >>> mock_config.registry.settings = {} Calls ``bind_engine`` with the configured ``engine``:: >>> includeme(mock_config) >>> >>> mock_config.action.assert_called_with(None, ... pyramid_basemodel.bind_engine, ... ('engine',), ... {'should_create': False, 'should_drop': False}) Unless told not to:: >>> pyramid_basemodel.bind_engine = Mock() >>> mock_config = Mock() >>> mock_config.registry.settings = {'basemodel.should_bind_engine': False} >>> includeme(mock_config) >>> mock_config.action.called False Teardown:: >>> pyramid_basemodel.engine_from_config = _engine_from_config >>> pyramid_basemodel.bind_engine = _bind_engine """ # Bind the engine. settings = config.get_settings() engine_kwargs_factory = settings.pop('sqlalchemy.engine_kwargs_factory', None) if engine_kwargs_factory: kwargs_factory = config.maybe_dotted(engine_kwargs_factory) engine_kwargs = kwargs_factory(config.registry) else: engine_kwargs = {} pool_class = settings.pop('sqlalchemy.pool_class', None) if pool_class: dotted_name = DottedNameResolver() engine_kwargs['poolclass'] = dotted_name.resolve(pool_class) should_bind = asbool(settings.get('basemodel.should_bind_engine', True)) should_create = asbool(settings.get('basemodel.should_create_all', False)) should_drop = asbool(settings.get('basemodel.should_drop_all', False)) if should_bind: engine = engine_from_config(settings, 'sqlalchemy.', **engine_kwargs) config.action(None, bind_engine, (engine,), { 'should_create': should_create, 'should_drop': should_drop})
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Usage in a template:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>
        {{ debug() }}

    Resolves the callable named by :ref:`websauna.template_debugger`
    (turned on only in :ref:`development.ini`); if none is configured,
    a warning is given instead.
    """
    template_debugger = settings.get("websauna.template_debugger")

    if template_debugger:
        debugger = DottedNameResolver().resolve(template_debugger)
    else:
        def debugger():
            logger.warn("{{ debug() }} invoked, but websauna.template_debugger not set")

    def _inner():
        debugger()
        return ""

    return _inner
def render(resource):
    """TALES extension rendering method

    *resource* has the form ``<library dotted name>:<resource name>``;
    the library module is resolved and the named resource attribute is
    looked up on it, then converted to a path.
    """
    library, resource_name = resource.split(':')
    module = DottedNameResolver().maybe_resolve(library)
    return get_resource_path(getattr(module, resource_name))
def configure_mailer(self):
    """Configure outgoing email backend and email test views."""
    from pyramid_mailer import IMailer

    settings = self.settings.copy()

    # Blank credentials break the mailer; convert them to None up front.
    for credential in ("mail.username", "mail.password"):
        if settings.get(credential, "x") == "":
            settings[credential] = None

    mailer_class = settings.get("websauna.mailer", "")
    if mailer_class in ("mail", ""):
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default backend
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
    else:
        # debug backend given as a dotted name
        mailer = DottedNameResolver().resolve(mailer_class)()
    self.config.registry.registerUtility(mailer, IMailer)

    if settings.get("websauna.sample_html_email", False):
        from websauna.system.mail import views
        self.config.scan(views)
        self.config.add_jinja2_search_path('websauna.system:mail/templates', name='.html')
def configure_mailer(self):
    """Configure outgoing email backend and email test views."""
    from pyramid_mailer import IMailer

    settings = self.settings.copy()

    # Empty credential strings are not handled gracefully, so mutate them
    # to None before passing the settings forward to the mailer.
    if settings.get("mail.username", "x") == "":
        settings["mail.username"] = None
    if settings.get("mail.password", "x") == "":
        settings["mail.password"] = None

    backend = settings.get("websauna.mailer", "")
    use_default_backend = backend in ("mail", "")
    if use_default_backend:
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default backend
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
    else:
        # debug backend
        resolver = DottedNameResolver()
        mailer = resolver.resolve(backend)()
    self.config.registry.registerUtility(mailer, IMailer)

    if settings.get("websauna.sample_html_email", False):
        from websauna.system.mail import views
        self.config.scan(views)
        self.config.add_jinja2_search_path(
            'websauna.system:mail/templates', name='.html')
def get_storage_impl(settings) -> Callable[[Any], Any]:
    """ Get and configure the storage backend wrapper """
    resolver = DottedNameResolver(__name__)
    storage = settings.get("pypi.storage", "file")

    # Backends that need an optional dependency fail fast with install hints.
    if storage == "azure-blob" and not AZURE_BLOB_IS_AVAILABLE:
        raise ValueError(
            "azure-blob storage backend selected but Azure Blob "
            "Storage is not available. "
            "Please install the azure-storage-blob library by "
            "including the `azure-blob` extra in your pip-install step. "
            "For example: `pip install pypicloud[azure-blob]`")
    if storage == "gcs" and not GCS_IS_AVAILABLE:
        raise ValueError(
            "gcs backend selected but GCS is not available. "
            "Please install the google-cloud-storage library by "
            "including the `gcs` extra in your pip-install step. "
            "For example: `pip install pypicloud[gcs]`")

    # Map the short aliases onto their dotted names; anything else is
    # assumed to already be a dotted name (or object) and passes through.
    aliases = {
        "azure-blob": "pypicloud.storage.AzureBlobStorage",
        "s3": "pypicloud.storage.S3Storage",
        "cloudfront": "pypicloud.storage.CloudFrontS3Storage",
        "gcs": "pypicloud.storage.GoogleCloudStorage",
        "file": "pypicloud.storage.FileStorage",
    }
    storage_impl = resolver.resolve(aliases.get(storage, storage))
    kwargs = storage_impl.configure(settings)
    return partial(storage_impl, **kwargs)
def _parse_settings(settings):
    """Translate ``rawes.*`` INI settings into keyword arguments for rawes.

    Handles string, list, integer and dotted-name (function) settings,
    applies defaults, and warns about settings that are no longer
    supported.

    :param settings: mapping of INI settings
    :return: dict of rawes keyword arguments
    """
    defaults = {
        'url': 'http://localhost:9200',
        'timeout': 30,
        'path': '',
        'json_encoder': encode_date_optional_time,
    }
    # (Removed a dead ``rawes_args = {}`` that was immediately overwritten.)
    rawes_args = defaults.copy()
    # set string settings
    for short_key_name in ('path',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = \
                settings.get(key_name, defaults.get(short_key_name))
    # set list settings: a multi-valued setting stays a list, a single
    # value is stripped down to a plain string.
    for short_key_name in ('url',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            raw_value = settings.get(key_name, defaults.get(short_key_name))
            values = aslist(raw_value)
            rawes_args[short_key_name] = \
                values if len(values) > 1 else raw_value.strip()
    # integer settings
    for short_key_name in ('timeout',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = \
                int(settings.get(key_name, defaults.get(short_key_name)))
    # function settings, given as dotted names
    resolver = DottedNameResolver()
    for short_key_name in ('json_encoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = \
                resolver.resolve(settings.get(key_name))
    for short_key_name in ('json_decoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            # The decoder setting names a class; instantiate it and use its
            # bound ``decode`` method.
            rawes_args[short_key_name] = \
                resolver.resolve(settings.get(key_name))().decode
    # removed settings
    for short_key_name in ('connection_type', 'except_on_error'):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            # BUG FIX: the key name was never interpolated into the message.
            warnings.warn(
                '%s is no longer supported, please remove from your settings.'
                % (key_name,),
                UserWarning
            )
    return rawes_args
def command(self): ini_file = self.args[-1] # pylint: disable=E1101 # TheLMA setup. config = self.__setup_thelma(ini_file) # Initialize the tool and run it. rsv = DottedNameResolver(None) tool_cls = rsv.resolve(self.__target_class.tool) arg_names = [od[1] for od in self.__target_class.option_defs] # Initializing lazy options. We pass the target class and the # options so the callback has access to them. opts = self.options # pylint: disable=E1101 for arg_name in arg_names: arg_value = getattr(opts, arg_name) if isinstance(arg_value, LazyOptionCallback): arg_value = arg_value.initialize(self.__target_class, opts) setattr(opts, arg_name, arg_value) # We only now do a final loop over the options so that lazy option # callbacks get a chance to set dependent option values. kw = {} for arg_name in arg_names: kw[arg_name] = getattr(opts, arg_name) # Remove options that are for command use only. for opt in self.parser.option_list: if opt.dest in kw and opt.pass_to_tool is False: del kw[opt.dest] tool = tool_cls(**kw) try: tool.run() except: transaction.abort() raise else: if tool.has_errors(): err_msgs = tool.get_messages() msg = 'Errors occurred during the tool run. Error messages:\n' raise RuntimeError(msg + os.linesep.join(err_msgs)) warn_msgs = tool.get_messages(logging_level=logging.WARNING) if warn_msgs \ and not self.options.ignore_warnings: # pylint: disable=E1101 msg = 'Warnings occurred during the tool run. You can ' \ 'repeat the run with the --ignore-warnings switch ' \ 'to force changes to be committed. Warning messages:\n' raise RuntimeError(msg + os.linesep.join(warn_msgs)) try: # This gives the tool command a chance to perform actions after # the tool has run. self.__target_class.finalize(tool, opts) except: transaction.abort() raise else: # Create a report of the run. self.__run_report(tool) # All good - check if we should commit. if not self.options.simulate: # pylint: disable=E1101 transaction.commit() else: transaction.abort() config.end()
def add_logging(config, log_key, log_func=None):
    """Register a logger callable for *log_key* on the configuration registry.

    When *log_func* is omitted, an attrgetter for *log_key* is used; the
    value is passed through ``maybe_resolve`` so dotted names also work.
    The key is appended to the logging order list.
    """
    if log_func is None:
        log_func = operator.attrgetter(log_key)
    log_func = DottedNameResolver().maybe_resolve(log_func)
    config.registry[get_key("registered_loggers")][log_key] = log_func
    config.registry[get_key("order")].append(log_key)
def get_processor_factory(settings):
    """Resolve ``billy.processor_factory`` from *settings* and return it."""
    dotted = settings['billy.processor_factory']
    return DottedNameResolver().maybe_resolve(dotted)
def __new__(cls, namespace, base, *args, **kw):
    """Create a namespaced policy subclass combining this class with *base*.

    *base* may be a policy class or its dotted name.
    """
    # Dotted name support makes it easy to configure with pyramid_multiauth
    name_resolver = DottedNameResolver(caller_package())
    base = name_resolver.maybe_resolve(base)
    # Dynamically create a subclass that mixes this class in front of the
    # resolved base; `_namespace_prefix` ("<namespace>.") is presumably
    # consumed by methods defined elsewhere in this class -- not visible here.
    name = 'Namespaced_%s_%s' % (namespace, base.__name__)
    klass = type(name, (cls, base), {'_namespace_prefix': namespace + '.'})
    return super(NamespacedAuthenticationPolicy, klass).__new__(klass)
def _load_module(config, package, module):
    """Resolve the module's ROUTE_PREFIX constant; None when it cannot be imported."""
    try:
        resolver = DottedNameResolver()
        prefix = resolver.resolve('{0}.{1}:ROUTE_PREFIX'.format(package, module))
    # BUG FIX: was the Python-2-only form ``except ImportError, e:`` with an
    # unused ``e``; this spelling works on both Python 2 and 3.
    except ImportError:
        prefix = None
def init_transistor(self, **kwargs):
    """Wire up a Transistor from the output/input URL schemes.

    Picks a drain from ``kwargs['output']`` and a source from
    ``kwargs['input'][0]``, connecting them through a Gate built with the
    (optionally configured) transducer.

    :raises ValueError: for an unsupported drain or source scheme.
    """
    output = kwargs['output']
    if output.scheme == 'file' and output.netloc == '-':
        del self.channels['output']
        drain = self.init_stream_drain(**kwargs)
    elif output.scheme.lower() in ZMQ_TRANSPORTS:
        drain = self.init_zmq_drain(**kwargs)
    elif output.scheme == 'kafka':
        del self.channels['output']
        drain = self.init_kafka_drain(**kwargs)
    elif output.scheme == 'sqs':
        del self.channels['output']
        drain = self.init_sqs_drain(**kwargs)
    else:
        raise ValueError(
            'Unsupported drain scheme: {}'.format(output.scheme)
        )

    # The gate "has" a drain;
    # a source "has" a gate
    resolver = DottedNameResolver()
    transducer = resolver.maybe_resolve(kwargs['transducer'])
    if kwargs['transducer_config']:
        transducer = transducer(*kwargs['transducer_config'])
    kwargs['gate'] = Gate(
        self.logger,
        self.loop,
        drain,
        transducer,
    )

    source_url = kwargs['input'][0]
    if not source_url.scheme and source_url.path == '-':
        del self.channels['input']
        source = self.init_stream_source(**kwargs)
    elif source_url.scheme == 'file':
        del self.channels['input']
        source = self.init_pailfile_source(**kwargs)
    elif source_url.scheme.lower() in ZMQ_TRANSPORTS:
        source = self.init_zmq_source(**kwargs)
    elif source_url.scheme == 'kafka':
        del self.channels['input']
        source = self.init_kafka_source(**kwargs)
    elif source_url.scheme == 'sqs':
        del self.channels['input']
        source = self.init_sqs_source(**kwargs)
    else:
        # BUG FIX: this previously formatted kwargs['input'].scheme, but
        # 'input' is a sequence (indexed [0] everywhere else), so the error
        # path raised AttributeError instead of the intended ValueError.
        raise ValueError(
            'Unsupported source scheme: {}'.format(source_url.scheme)
        )
    return Transistor(
        self.logger,
        self.loop,
        kwargs['gate'],
        source,
        drain,
    )
def _load_module(config, package, module):
    """Resolve the module's ROUTE_PREFIX constant; None when it cannot be imported."""
    try:
        resolver = DottedNameResolver()
        prefix = resolver.resolve('{0}.{1}:ROUTE_PREFIX'.format(
            package, module))
    # BUG FIX: was the Python-2-only form ``except ImportError, e:`` with an
    # unused ``e``; this spelling works on both Python 2 and 3.
    except ImportError:
        prefix = None
def includeme(config):
    """ Set up and configure the pypicloud app """
    config.set_root_factory(Root)
    for addon in ('pyramid_tm', 'pyramid_beaker', 'pyramid_duh',
                  'pyramid_duh.auth', 'pypicloud.auth', 'pypicloud.access'):
        config.include(addon)
    settings = config.get_settings()
    config.add_renderer('json', json_renderer)

    # Jinja2 configuration
    settings['jinja2.filters'] = {
        'static_url': 'pyramid_jinja2.filters:static_url_filter',
        'tojson': to_json,
    }
    settings['jinja2.directories'] = ['pypicloud:templates']
    config.include('pyramid_jinja2')

    # BEAKER CONFIGURATION
    settings.setdefault('session.type', 'cookie')
    settings.setdefault('session.httponly', 'true')
    config.set_session_factory(session_factory_from_settings(settings))

    # PYPICLOUD SETTINGS
    registry = config.registry
    registry.fallback_url = settings.get('pypi.fallback_url',
                                         'http://pypi.python.org/simple')
    registry.use_fallback = asbool(settings.get('pypi.use_fallback', True))
    registry.realm = settings.get('pypi.realm', 'pypi')

    # CACHING DATABASE SETTINGS
    # Short aliases map onto dotted names; anything else passes through.
    cache_aliases = {
        'sql': 'pypicloud.cache.SQLCache',
        'redis': 'pypicloud.cache.RedisCache',
    }
    dotted_cache = settings.get('pypi.db', 'sql')
    dotted_cache = cache_aliases.get(dotted_cache, dotted_cache)
    cache_impl = DottedNameResolver(__name__).resolve(dotted_cache)
    cache_impl.configure(config)
    cache_impl.reload_if_needed()
    config.add_request_method(cache_impl, name='db', reify=True)

    # Special request methods
    config.add_request_method(_app_url, name='app_url')
    config.add_request_method(lambda x: __version__, name='pypicloud_version',
                              reify=True)

    cache_max_age = int(settings.get('pyramid.cache_max_age', 3600))
    config.add_static_view(name='static/%s' % __version__,
                           path='pypicloud:static',
                           cache_max_age=cache_max_age)
def processor(self):
    """The payment processor """
    dotted = self.registry.settings['billy.processor_factory']
    factory = DottedNameResolver().maybe_resolve(dotted)
    return factory()
def deserialize(self, node, cstruct):
    """Accept a callable as-is, or resolve a dotted-name string to one."""
    if callable(cstruct):
        return cstruct
    if isinstance(cstruct, str):
        return DottedNameResolver().maybe_resolve(cstruct)
    raise Invalid(node, '{} is not a valid Python dotted name'.format(
        cstruct
    ))
def pyramid_config_from_settings(global_config, app_settings):
    """Build a Configurator from the INI file settings merged with *app_settings*.

    The caller's module determines the Configurator package.
    """
    merged = load_settings_from_file(global_config.get('__file__'))
    merged.update(app_settings)
    resolver = DottedNameResolver(caller_module())
    return Configurator(settings=merged, package=resolver.get_package())
def to_config(cls, global_config, app_settings):
    """Build a Configurator from the INI file named in *global_config*,
    overlaid with *app_settings*; the caller's module sets the package."""
    merged = cls.load_settings_from_file(global_config.get('__file__'))
    merged.update(app_settings)
    package = DottedNameResolver(caller_module()).get_package()
    return Configurator(settings=merged, package=package)
def deserialize(self, node, cstruct):
    """Resolve *cstruct* as a dotted name; null/None passes through as null.

    Raises Invalid when the dotted name cannot be imported.
    """
    if cstruct is None or cstruct is colander.null:
        return colander.null
    resolver = DottedNameResolver()
    try:
        return resolver.resolve(cstruct)
    except ImportError as exc:
        raise Invalid(node, str(exc))
def get_api_class(registry):
    """Looks up a the API class to use within a Pyramid configuration
    registry. It will return the TemplateAPI class if none is found.
    """
    if not hasattr(registry, 'settings'):
        return TemplateAPI
    configured = registry.settings.get('bowab.api_class', TemplateAPI)
    return DottedNameResolver().maybe_resolve(configured)
def __init__(self,
             registry=None,
             package=None,
             autocommit=True,
             # Entity level services.
             filter_specification_factory=None,
             order_specification_factory=None,
             # Application level services.
             service=None,
             cql_filter_specification_visitor=None,
             sql_filter_specification_visitor=None,
             eval_filter_specification_visitor=None,
             cql_order_specification_visitor=None,
             sql_order_specification_visitor=None,
             eval_order_specification_visitor=None,
             url_converter=None,
             **kw
             ):
    """Configurator constructor.

    When no *registry* is given, a fresh Registry is created for the
    caller's package and ``setup_registry`` is invoked with the custom
    specification factory/visitor options; otherwise the registry is used
    as-is and ``setup_registry`` is skipped.
    """
    if package is None:
        package = caller_package()
    call_setup = registry is None
    if call_setup:
        # Need to initialize our registry here to call our setup_registry
        # with the given custom option values rather than from the base
        # class constructor.
        # FIXME: There is some code duplication with Pyramid here.
        name_resolver = DottedNameResolver(package)
        package_name = name_resolver.get_package_name()
        registry = Registry(package_name)
        self.registry = registry
    # FIXME: Investigate why we need the "autocommit=True" flag here.
    PyramidConfigurator.__init__(self,
                                 registry=registry,
                                 package=package,
                                 autocommit=autocommit,
                                 **kw)
    # Set up configurator's load_zcml method.
    self.add_directive('load_zcml', load_zcml, action_wrap=False)
    if call_setup:
        self.setup_registry(
            filter_specification_factory=filter_specification_factory,
            order_specification_factory=order_specification_factory,
            service=service,
            cql_filter_specification_visitor=cql_filter_specification_visitor,
            sql_filter_specification_visitor=sql_filter_specification_visitor,
            eval_filter_specification_visitor=eval_filter_specification_visitor,
            cql_order_specification_visitor=cql_order_specification_visitor,
            sql_order_specification_visitor=sql_order_specification_visitor,
            eval_order_specification_visitor=eval_order_specification_visitor,
            url_converter=url_converter,
            **kw)
def parse_asset_settings(settings):
    """Parse the asset configuration referenced by the ``assets.config`` setting.

    Returns a dict with ``store_locally``, the resolved ``tmp_path`` and
    ``save_path``, the resolved entries of the ``assets:local`` section and,
    when not storing locally, the ``assets:cloud`` section with its
    ``service`` resolved from a dotted name.
    """
    config = configparser.SafeConfigParser()
    asset_resolver = AssetResolver()
    dotted_resolver = DottedNameResolver()
    asset_config = settings.get('assets.config')
    # BUG FIX: removed a leftover debug ``print asset_config`` statement,
    # and dropped two no-op ``except ...: raise`` blocks (the exceptions
    # still propagate unchanged).
    s = asset_resolver.resolve(asset_config).abspath()
    config.read(s)
    try:
        store_locally = config.getboolean('assets', 'store_locally')
    except configparser.NoSectionError:
        # ``assets.config`` may be a plain filesystem path instead of an
        # asset spec; read the file directly and retry.
        with open(asset_config) as fp:
            config.readfp(fp)
        store_locally = config.getboolean('assets', 'store_locally')
    result = dict(
        store_locally=store_locally,
        tmp_path=asset_resolver.resolve(config.get('assets', 'tmp_path')).abspath(),
        save_path=asset_resolver.resolve(config.get('assets', 'save_path')).abspath()
    )
    for key, value in config.items('assets:local'):
        # Skip any urls since they don't need to be resolved
        # TODO: Might produce bugs if module name starts with `http`
        if value.startswith('http'):
            result[key] = value
            continue
        try:
            value = asset_resolver.resolve(value).abspath()
        except ValueError:
            # This gets raised if the name isn't in dotted notation
            pass
        except ImportError:
            # This gets raised if there's ":" in the value but it's not a module
            pass
        finally:
            result[key] = value
    if not store_locally:
        cloud = dict(config.items('assets:cloud'))
        cloud['service'] = dotted_resolver.resolve(cloud.get('service'))
        result.update(cloud)
    return result
def get_test_extract():
    """Build a fully populated ExtractRecord test fixture.

    Constructs a dummy real estate with view services, resolves the
    configured base-data date/provider methods via dotted names, and
    assembles the extract (logos, embeddable metadata, exclusions of
    liability, glossary, certification texts) inside a test
    configuration context.

    :returns: the assembled ExtractRecord.
    """
    date = datetime.datetime.now()
    with pyramid_oereb_test_config():
        view_service = ViewServiceRecord(u'http://geowms.bl.ch',
                                         1,
                                         1.0,
                                         u'http://geowms.bl.ch',
                                         None)
        real_estate = RealEstateRecord(u'RealEstate', u'BL', u'Liestal', 2829, 11395,
                                       MultiPolygon([Polygon([(0, 0), (1, 1), (1, 0)])]),
                                       u'http://www.geocat.ch', u'1000', u'BL0200002829',
                                       u'CH775979211712')
        real_estate.set_view_service(view_service)
        real_estate.set_main_page_view_service(view_service)
        office_record = OfficeRecord({'de': u'AGI'})
        # The base-data date and provider methods are configured as
        # dotted names and resolved at runtime.
        resolver = DottedNameResolver()
        date_method_string = Config.get('extract').get('base_data').get('methods').get('date')
        date_method = resolver.resolve(date_method_string)
        av_update_date = date_method(real_estate)
        base_data = Config.get_base_data(av_update_date)
        av_provider_method_string = Config.get('extract').get('base_data').get('methods').get('provider')
        av_provider_method = resolver.resolve(av_provider_method_string)
        cadaster_state = date
        theme = ThemeRecord(u'TEST', {'de': u'TEST TEXT'})
        datasources = [DatasourceRecord(theme, date, office_record)]
        plr_cadastre_authority = Config.get_plr_cadastre_authority()
        embeddable = EmbeddableRecord(
            cadaster_state,
            plr_cadastre_authority,
            av_provider_method(real_estate),
            av_update_date,
            datasources
        )
        # The four ImageRecord arguments are the extract logos
        # (presumably federation/canton/municipality/real-estate --
        # TODO confirm against ExtractRecord's signature).
        extract = ExtractRecord(
            real_estate,
            ImageRecord('1'.encode('utf-8')),
            ImageRecord('2'.encode('utf-8')),
            ImageRecord('3'.encode('utf-8')),
            ImageRecord('4'.encode('utf-8')),
            office_record,
            base_data,
            embeddable,
            exclusions_of_liability=[
                ExclusionOfLiabilityRecord({'de': u'Haftungsausschluss'}, {'de': u'Test'})
            ],
            glossaries=[GlossaryRecord({'de': u'Glossar'}, {'de': u'Test'})],
            general_information={'de': u'Allgemeine Informationen'},
            certification={'de': u'certification'},
            certification_at_web={'de': u'certification_at_web'},
        )
        # extract.qr_code = 'VGhpcyBpcyBub3QgYSBRUiBjb2Rl'.encode('utf-8') TODO:
        # qr_code Must be an image ('base64Binary'), but even with images xml validation
        # fails on it.
        # extract.electronic_signature = 'Signature'  # TODO: fix signature rendering first
        return extract
def init_transistor(self, **kwargs):
    """Wire up and return a Transistor.

    Selects a drain from the ``output`` URL scheme, builds the gate
    (drain plus an optionally configured transducer resolved from a
    dotted name), then selects a source from the first ``input`` URL's
    scheme.

    :param kwargs: must contain ``output`` (a parsed URL), ``input``
        (a sequence of parsed URLs), ``transducer`` and
        ``transducer_config``; ``gate`` is added as a side effect.
    :raises ValueError: for an unsupported output or input scheme.
    """
    if kwargs['output'].scheme == 'file' and \
            kwargs['output'].netloc == '-':
        del self.channels['output']
        drain = self.init_stream_drain(**kwargs)
    elif kwargs['output'].scheme.lower() in ZMQ_TRANSPORTS:
        drain = self.init_zmq_drain(**kwargs)
    elif kwargs['output'].scheme == 'kafka':
        del self.channels['output']
        drain = self.init_kafka_drain(**kwargs)
    elif kwargs['output'].scheme == 'sqs':
        del self.channels['output']
        drain = self.init_sqs_drain(**kwargs)
    else:
        raise ValueError('Unsupported drain scheme: {}'.format(
            kwargs['output'].scheme))

    # The gate "has" a drain;
    # a source "has" a gate
    resolver = DottedNameResolver()
    transducer = resolver.maybe_resolve(kwargs['transducer'])
    if kwargs['transducer_config']:
        transducer = transducer(*kwargs['transducer_config'])
    kwargs['gate'] = Gate(
        self.logger,
        self.loop,
        drain,
        transducer,
    )

    if not kwargs['input'][0].scheme and kwargs['input'][0].path == '-':
        del self.channels['input']
        source = self.init_stream_source(**kwargs)
    elif kwargs['input'][0].scheme == 'file':
        del self.channels['input']
        source = self.init_pailfile_source(**kwargs)
    elif kwargs['input'][0].scheme.lower() in ZMQ_TRANSPORTS:
        source = self.init_zmq_source(**kwargs)
    elif kwargs['input'][0].scheme == 'kafka':
        del self.channels['input']
        source = self.init_kafka_source(**kwargs)
    elif kwargs['input'][0].scheme == 'sqs':
        del self.channels['input']
        source = self.init_sqs_source(**kwargs)
    else:
        # Bug fix: kwargs['input'] is a sequence of parsed URLs; the
        # original accessed `.scheme` on the sequence itself, turning
        # the intended ValueError into an AttributeError.
        raise ValueError('Unsupported source scheme: {}'.format(
            kwargs['input'][0].scheme))

    return Transistor(
        self.logger,
        self.loop,
        kwargs['gate'],
        source,
        drain,
    )
def __init__(
    self,
    registry=None,
    package=None,
    settings=None,
    root_factory=None,
    security_policy=None,
    authentication_policy=None,
    authorization_policy=None,
    renderers=None,
    debug_logger=None,
    locale_negotiator=None,
    request_factory=None,
    response_factory=None,
    default_permission=None,
    session_factory=None,
    default_view_mapper=None,
    autocommit=False,
    exceptionresponse_view=default_exceptionresponse_view,
    route_prefix=None,
    introspection=True,
    root_package=None,
):
    """Initialize the configurator.

    When *registry* is ``None``, a fresh :class:`Registry` is created
    and fully initialized via ``setup_registry`` with the keyword
    options given here; when a registry is supplied, it is used as-is
    and ``setup_registry`` is NOT called (the caller is responsible for
    registry setup in that case).

    :param package: package on whose behalf the configurator acts;
        defaults to the caller's package.
    :param root_package: top-level package for asset/spec resolution;
        defaults to *package*.
    """
    if package is None:
        package = caller_package()
    if root_package is None:
        root_package = package
    name_resolver = DottedNameResolver(package)
    self.name_resolver = name_resolver
    self.package_name = name_resolver.get_package_name()
    self.package = name_resolver.get_package()
    self.root_package = root_package
    # May be None here; re-assigned below when a registry is created.
    self.registry = registry
    self.autocommit = autocommit
    self.route_prefix = route_prefix
    self.introspection = introspection
    if registry is None:
        registry = Registry(self.package_name)
        self.registry = registry
        # Only a registry created here gets the full default setup.
        self.setup_registry(
            settings=settings,
            root_factory=root_factory,
            authentication_policy=authentication_policy,
            authorization_policy=authorization_policy,
            security_policy=security_policy,
            renderers=renderers,
            debug_logger=debug_logger,
            locale_negotiator=locale_negotiator,
            request_factory=request_factory,
            response_factory=response_factory,
            default_permission=default_permission,
            session_factory=session_factory,
            default_view_mapper=default_view_mapper,
            exceptionresponse_view=exceptionresponse_view,
        )
def get_callbacks(name, scenes, settings):
    """Resolve per-scene callback dotted names from *settings*.

    Looks up ``<CONFIG_PREFIX>.<name>.callback.<scene>`` for each scene
    and resolves it to a callable.  Scenes with no configured callback,
    or whose dotted name cannot be resolved, are omitted from the
    result (callback wiring is best-effort).

    :param name: configuration sub-key naming the callback group.
    :param scenes: iterable of scene identifiers to look up.
    :param settings: Pyramid-style settings mapping.
    :returns: dict mapping scene -> resolved callable.
    """
    resolver = DottedNameResolver()
    callbacks = {}
    for scene in scenes:
        dotted = settings.get(f"{CONFIG_PREFIX}.{name}.callback.{scene}", None)
        if not dotted:
            # Previously this fell through to resolve(None) and relied
            # on the raised exception to skip the scene; skip explicitly
            # instead so the except below only masks resolution errors.
            continue
        try:
            callbacks[scene] = resolver.resolve(dotted)
        except Exception:
            # Deliberate best-effort: an unresolvable dotted name is
            # skipped rather than aborting callback registration.
            pass
    return callbacks
def scan_models(module):
    """
    Scan a models module to force Model registration.

    Argument `module` can be a models module or a Python dotted string.
    """
    target = DottedNameResolver().maybe_resolve(module)
    venusian.Scanner().scan(target)
def get_storage_impl(settings):
    """ Get and configure the storage backend wrapper """
    # Short aliases for the bundled backends; anything else is taken as
    # a dotted path to a custom storage class.
    aliases = {
        's3': 'pypicloud.storage.S3Storage',
        'file': 'pypicloud.storage.FileStorage',
    }
    name = settings.get('pypi.storage', 'file')
    dotted = aliases.get(name, name)
    storage_impl = DottedNameResolver(__name__).resolve(dotted)
    kwargs = storage_impl.configure(settings)
    return partial(storage_impl, **kwargs)
def get_cache_impl(settings):
    """Get the cache class from settings"""
    # Short aliases for the bundled caches; other values are treated as
    # dotted paths to custom cache classes.
    shortcuts = {
        "sql": "pypicloud.cache.SQLCache",
        "redis": "pypicloud.cache.RedisCache",
        "dynamo": "pypicloud.cache.dynamo.DynamoCache",
    }
    dotted_cache = settings.get("pypi.db", "sql")
    dotted_cache = shortcuts.get(dotted_cache, dotted_cache)
    return DottedNameResolver(__name__).resolve(dotted_cache)
def __init__(self, ipaddrs, userid=None, principals=None, proxies=None,
             get_userid=None, get_principals=None):
    """Initialize the policy.

    :param ipaddrs: IP addresses/ranges this policy applies to.
    :param userid: static userid to assign, if any.
    :param principals: extra principals; a whitespace/comma separated
        string is split into a list, any other value is kept as-is.
    :param proxies: trusted proxy addresses/ranges.
    :param get_userid: callable or dotted name resolving to one.
    :param get_principals: callable or dotted name resolving to one.
    """
    r = DottedNameResolver()
    # maybe_resolve accepts either a callable or a dotted-name string,
    # leaving non-string values untouched.
    self.get_userid = r.maybe_resolve(get_userid)
    self.get_principals = r.maybe_resolve(get_principals)
    self.ipaddrs = make_ip_set(ipaddrs)
    self.userid = userid
    # NOTE(review): `basestring` exists only on Python 2; under Python 3
    # this raises NameError — confirm the module's target version.
    if isinstance(principals, basestring):
        self.principals = aslist(principals)
    else:
        self.principals = principals
    self.proxies = make_ip_set(proxies)
def get_cache_impl(settings):
    """ Get the cache class from settings """
    # Map the short backend names onto their dotted class paths; any
    # unrecognized value is assumed to already be a dotted path.
    known = {
        'sql': 'pypicloud.cache.SQLCache',
        'redis': 'pypicloud.cache.RedisCache',
        'dynamo': 'pypicloud.cache.dynamo.DynamoCache',
    }
    choice = settings.get('pypi.db', 'sql')
    resolver = DottedNameResolver(__name__)
    return resolver.resolve(known.get(choice, choice))
def get_cache_impl(settings):
    """ Get the cache class from settings """
    dotted_cache = settings.get("pypi.db", "sql")
    # Expand the built-in shorthand names; leave full dotted paths alone.
    dotted_cache = {
        "sql": "pypicloud.cache.SQLCache",
        "redis": "pypicloud.cache.RedisCache",
        "dynamo": "pypicloud.cache.dynamo.DynamoCache",
    }.get(dotted_cache, dotted_cache)
    resolver = DottedNameResolver(__name__)
    return resolver.resolve(dotted_cache)
def test_embeddable(params):
    """Check that the renderer formats an EmbeddableRecord into the
    expected plain-dict structure (cadastre organisation, datasources,
    state and update timestamps)."""
    renderer = Renderer(DummyRenderInfo())
    renderer._language = u'de'
    renderer._params = params
    date = datetime.datetime.now()
    view_service = ViewServiceRecord(u'http://geowms.bl.ch',
                                     1,
                                     1.0,
                                     {'de': u'http://geowms.bl.ch'},
                                     None)
    real_estate = RealEstateRecord(
        u'RealEstate', u'BL', u'Liestal', 2829, 11395,
        MultiPolygon([Polygon([(0, 0), (1, 1), (1, 0)])]),
        u'http://www.geocat.ch', u'1000', u'BL0200002829', u'CH775979211712'
    )
    real_estate.plan_for_land_register = view_service
    # Resolve the configured base-data date/provider methods from their
    # dotted names, as the extract building code does.
    resolver = DottedNameResolver()
    date_method_string = Config.get('extract').get('base_data').get('methods').get('date')
    date_method = resolver.resolve(date_method_string)
    av_update_date = date_method(real_estate)
    av_provider_method_string = Config.get('extract').get('base_data').get('methods').get('provider')
    av_provider_method = resolver.resolve(av_provider_method_string)
    cadaster_state = date
    theme = ThemeRecord(u'TEST', {u'de': u'TEST TEXT'})
    datasources = [DatasourceRecord(theme, date, OfficeRecord({u'de': u'Test Office'}))]
    plr_cadastre_authority = Config.get_plr_cadastre_authority()
    embeddable = EmbeddableRecord(
        cadaster_state,
        plr_cadastre_authority,
        av_provider_method(real_estate),
        av_update_date,
        datasources
    )
    result = renderer.format_embeddable(embeddable)
    # Timestamps are expected in the renderer's '%d-%m-%YT%H:%M:%S' form.
    assert result == {
        'cadasterOrganisationName': plr_cadastre_authority.name.get('de'),
        'datasource': [{
            'topic': {
                'Text': {'Text': u'TEST TEXT', 'Language': 'de'},
                'Code': 'TEST'
            },
            'dataownerName': u'Test Office',
            'transferFromSource': date.strftime('%d-%m-%YT%H:%M:%S')
        }],
        'cadasterState': cadaster_state.strftime('%d-%m-%YT%H:%M:%S'),
        'dataOwnerNameCadastralSurveying': u'This is only a dummy',
        'transferFromSourceCadastralSurveying': av_update_date.strftime('%d-%m-%YT%H:%M:%S')
    }
def get_pwreset_class(registry):
    """Looks up the password reset class to use within a Pyramid
    configuration registry.

    It will return the PasswordReset class if none is set.
    """
    result = PasswordReset
    if hasattr(registry, 'settings'):
        # The setting may be a dotted name or an actual class;
        # maybe_resolve handles both.
        spec = registry.settings.get('speak_friend.pwreset_class', result)
        result = DottedNameResolver().maybe_resolve(spec)
    return result