def create_dummy_extract():
    """Build an ExtractRecord filled with static placeholder data.

    Resolves the configured base-data ``date`` and ``provider`` hooks from
    the application :class:`Config`, applies them to a dummy real estate and
    wires everything into an ExtractRecord using tiny byte-string stand-ins
    for the four logos.
    """
    now = datetime.datetime.now()
    estate = RealEstateRecord(u'test', u'BL', u'Laufen', 2770, 1000,
                              MultiPolygon(),
                              ViewServiceRecord('test_link', 1, 1.0,
                                                {'de': 'test_legend'}))
    authority = OfficeRecord({u'en': u'PLR Authority'})
    resolver = DottedNameResolver()
    # Resolve and apply the configured "date" hook for the base data.
    date_hook = resolver.resolve(
        Config.get('extract').get('base_data').get('methods').get('date'))
    survey_update_date = date_hook(estate)
    base_data = Config.get_base_data(survey_update_date)
    # Resolve the configured "provider" hook.
    provider_hook = resolver.resolve(
        Config.get('extract').get('base_data').get('methods').get('provider'))
    theme = ThemeRecord(u'TEST', {u'de': u'TEST TEXT'})
    embeddable = EmbeddableRecord(now,
                                  Config.get_plr_cadastre_authority(),
                                  provider_hook(estate),
                                  survey_update_date,
                                  [DatasourceRecord(theme, now, authority)])
    placeholder_logo = '100'.encode('utf-8')  # stand-in bytes for every logo
    return ExtractRecord(estate,
                         ImageRecord(placeholder_logo),
                         ImageRecord(placeholder_logo),
                         ImageRecord(placeholder_logo),
                         ImageRecord(placeholder_logo),
                         authority,
                         base_data,
                         embeddable)
def _parse_settings(settings):
    """Extract and normalize ``rawes.*`` options from a settings mapping.

    Starts from built-in defaults and overrides each option that appears
    in ``settings``. Returns a kwargs dict suitable for the rawes client.

    :param settings: configuration mapping (e.g. Pyramid settings)
    :return: dict of rawes client keyword arguments
    """
    defaults = {
        'url': 'http://localhost:9200',
        'timeout': 30,
        'path': '',
        'json_encoder': encode_date_optional_time,
    }
    # FIX: removed a dead ``rawes_args = {}`` that was immediately
    # overwritten by the copy below.
    rawes_args = defaults.copy()
    # plain string settings
    for short_key_name in ('path',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = settings.get(
                key_name, defaults.get(short_key_name))
    # list settings: multiple values stay a list, a single value collapses
    # to a stripped string
    for short_key_name in ('url',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            raw_value = settings.get(key_name, defaults.get(short_key_name))
            values = aslist(raw_value)
            rawes_args[short_key_name] = (
                values if len(values) > 1 else raw_value.strip())
    # integer settings
    for short_key_name in ('timeout',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = int(
                settings.get(key_name, defaults.get(short_key_name)))
    # dotted-name settings resolved to callables
    resolver = DottedNameResolver()
    for short_key_name in ('json_encoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            rawes_args[short_key_name] = resolver.resolve(
                settings.get(key_name))
    for short_key_name in ('json_decoder',):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            # Instantiate the resolved class and use its bound decode method.
            rawes_args[short_key_name] = resolver.resolve(
                settings.get(key_name))().decode
    # settings dropped in newer versions: warn so users clean up their INI
    for short_key_name in ('connection_type', 'except_on_error'):
        key_name = 'rawes.%s' % (short_key_name,)
        if key_name in settings:
            warnings.warn(
                # BUG FIX: the key name was previously never interpolated,
                # so the warning text contained a literal "%s".
                '%s is no longer supported, please remove from your settings.'
                % (key_name,),
                UserWarning
            )
    return rawes_args
def get_test_extract():
    """Build a fully populated ExtractRecord for tests.

    Runs inside the test configuration context so Config lookups resolve;
    fills the record via the configured base-data ``date``/``provider``
    hooks and attaches dummy exclusions, glossaries and certification
    texts.
    """
    date = datetime.datetime.now()
    with pyramid_oereb_test_config():
        view_service = ViewServiceRecord(u'http://geowms.bl.ch', 1, 1.0, u'http://geowms.bl.ch', None)
        real_estate = RealEstateRecord(u'RealEstate', u'BL', u'Liestal', 2829, 11395,
                                       MultiPolygon([Polygon([(0, 0), (1, 1), (1, 0)])]),
                                       u'http://www.geocat.ch', u'1000', u'BL0200002829',
                                       u'CH775979211712')
        real_estate.set_view_service(view_service)
        real_estate.set_main_page_view_service(view_service)
        office_record = OfficeRecord({'de': u'AGI'})
        # Resolve the configured base-data hooks by dotted name.
        resolver = DottedNameResolver()
        date_method_string = Config.get('extract').get('base_data').get('methods').get('date')
        date_method = resolver.resolve(date_method_string)
        av_update_date = date_method(real_estate)
        base_data = Config.get_base_data(av_update_date)
        av_provider_method_string = Config.get('extract').get('base_data').get('methods').get('provider')
        av_provider_method = resolver.resolve(av_provider_method_string)
        cadaster_state = date
        theme = ThemeRecord(u'TEST', {'de': u'TEST TEXT'})
        datasources = [DatasourceRecord(theme, date, office_record)]
        plr_cadastre_authority = Config.get_plr_cadastre_authority()
        embeddable = EmbeddableRecord(
            cadaster_state,
            plr_cadastre_authority,
            av_provider_method(real_estate),
            av_update_date,
            datasources
        )
        extract = ExtractRecord(
            real_estate,
            ImageRecord('1'.encode('utf-8')),
            ImageRecord('2'.encode('utf-8')),
            ImageRecord('3'.encode('utf-8')),
            ImageRecord('4'.encode('utf-8')),
            office_record,
            base_data,
            embeddable,
            exclusions_of_liability=[
                ExclusionOfLiabilityRecord({'de': u'Haftungsausschluss'}, {'de': u'Test'})
            ],
            glossaries=[GlossaryRecord({'de': u'Glossar'}, {'de': u'Test'})],
            general_information={'de': u'Allgemeine Informationen'},
            certification={'de': u'certification'},
            certification_at_web={'de': u'certification_at_web'},
        )
        # extract.qr_code = 'VGhpcyBpcyBub3QgYSBRUiBjb2Rl'.encode('utf-8') TODO:
        # qr_code Must be an image ('base64Binary'), but even with images xml validation
        # fails on it.
        # extract.electronic_signature = 'Signature'  # TODO: fix signature rendering first
        return extract
def test_embeddable(params):
    """Verify that ``format_embeddable`` serializes an EmbeddableRecord to
    the expected wire format (topic, data owner and timestamp fields)."""
    renderer = Renderer(DummyRenderInfo())
    renderer._language = u'de'
    renderer._params = params
    date = datetime.datetime.now()
    view_service = ViewServiceRecord(u'http://geowms.bl.ch',
                                     1, 1.0,
                                     {'de': u'http://geowms.bl.ch'},
                                     None)
    real_estate = RealEstateRecord(
        u'RealEstate', u'BL', u'Liestal', 2829, 11395,
        MultiPolygon([Polygon([(0, 0), (1, 1), (1, 0)])]),
        u'http://www.geocat.ch', u'1000', u'BL0200002829', u'CH775979211712'
    )
    real_estate.plan_for_land_register = view_service
    # Resolve the configured base-data hooks used to fill the record.
    resolver = DottedNameResolver()
    date_method_string = Config.get('extract').get('base_data').get('methods').get('date')
    date_method = resolver.resolve(date_method_string)
    av_update_date = date_method(real_estate)
    av_provider_method_string = Config.get('extract').get('base_data').get('methods').get('provider')
    av_provider_method = resolver.resolve(av_provider_method_string)
    cadaster_state = date
    theme = ThemeRecord(u'TEST', {u'de': u'TEST TEXT'})
    datasources = [DatasourceRecord(theme, date, OfficeRecord({u'de': u'Test Office'}))]
    plr_cadastre_authority = Config.get_plr_cadastre_authority()
    embeddable = EmbeddableRecord(
        cadaster_state,
        plr_cadastre_authority,
        av_provider_method(real_estate),
        av_update_date,
        datasources
    )
    result = renderer.format_embeddable(embeddable)
    # Expected serialized structure; timestamps use the renderer's
    # '%d-%m-%YT%H:%M:%S' format.
    assert result == {
        'cadasterOrganisationName': plr_cadastre_authority.name.get('de'),
        'datasource': [{
            'topic': {
                'Text': {'Text': u'TEST TEXT', 'Language': 'de'},
                'Code': 'TEST'
            },
            'dataownerName': u'Test Office',
            'transferFromSource': date.strftime('%d-%m-%YT%H:%M:%S')
        }],
        'cadasterState': cadaster_state.strftime('%d-%m-%YT%H:%M:%S'),
        'dataOwnerNameCadastralSurveying': u'This is only a dummy',
        'transferFromSourceCadastralSurveying': av_update_date.strftime('%d-%m-%YT%H:%M:%S')
    }
def test_init():
    """Smoke test: a freshly built ExtractRecord exposes all expected
    attributes with the correct types."""
    date = datetime.datetime.now()
    real_estate = RealEstateRecord(u'test', u'BL', u'Laufen', 2770, 1000,
                                   MultiPolygon(), ViewServiceRecord(
                                       'test_link',
                                       'test_legend'
                                   ))
    plr_office = OfficeRecord({u'en': u'PLR Authority'})
    # Resolve the configured base-data hooks by dotted name.
    resolver = DottedNameResolver()
    date_method_string = Config.get('extract').get('base_data').get('methods').get('date')
    date_method = resolver.resolve(date_method_string)
    av_update_date = date_method(real_estate)
    base_data = Config.get_base_data(av_update_date)
    av_provider_method_string = Config.get('extract').get('base_data').get('methods').get('provider')
    av_provider_method = resolver.resolve(av_provider_method_string)
    cadaster_state = date
    theme = ThemeRecord(u'TEST', {u'de': u'TEST TEXT'})
    datasources = [DatasourceRecord(theme, date, plr_office)]
    plr_cadastre_authority = Config.get_plr_cadastre_authority()
    embeddable = EmbeddableRecord(
        cadaster_state,
        plr_cadastre_authority,
        av_provider_method(real_estate),
        av_update_date,
        datasources
    )
    record = ExtractRecord(
        real_estate,
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        ImageRecord('100'.encode('utf-8')),
        plr_office,
        base_data,
        embeddable
    )
    # The constructor must leave every attribute populated and typed.
    assert isinstance(record.extract_identifier, str)
    assert isinstance(record.real_estate, RealEstateRecord)
    assert isinstance(record.not_concerned_theme, list)
    assert isinstance(record.concerned_theme, list)
    assert isinstance(record.theme_without_data, list)
    assert isinstance(record.creation_date, datetime.date)
    assert isinstance(record.logo_plr_cadastre, ImageRecord)
    assert isinstance(record.federal_logo, ImageRecord)
    assert isinstance(record.cantonal_logo, ImageRecord)
    assert isinstance(record.municipality_logo, ImageRecord)
    assert isinstance(record.exclusions_of_liability, list)
    assert isinstance(record.glossaries, list)
    assert isinstance(record.plr_cadastre_authority, OfficeRecord)
    assert isinstance(record.base_data, dict)
    assert isinstance(record.embeddable, EmbeddableRecord)
def configure_task(task_name):
    """Resolve, configure and return the task class named ``task_name``.

    Reads the task's section from the module-level ``_config``, merges the
    ``kombu.``- and ``task.``-prefixed options, normalizes queues,
    exchanges, schedule and rate, then hands everything to the class's
    ``configure`` hook.

    :param task_name: key into the module configuration
    :return: the configured task class
    """
    global _config
    config = _config[task_name]
    tc = util.prefixed_keys(config, 'kombu.')
    tc.update(util.prefixed_keys(config, 'task.'))
    resolver = DottedNameResolver()
    cls = resolver.resolve(tc['class'])
    if 'queues' in tc:
        tc['queues'] = dict(
            (k, get_queue(q)) for k, q in util.as_dict(tc['queues']).items())
    else:
        # BUG FIX: this branch previously assigned tc['exchanges'] = {},
        # leaving 'queues' unset whenever it was absent from the config.
        tc['queues'] = {}
    if 'exchanges' in tc:
        tc['exchanges'] = dict(
            (e, get_exchange(e)) for e in util.as_list(tc['exchanges']))
    else:
        tc['exchanges'] = {}
    if 'schedule' in tc:
        tc['schedule'] = _parse_schedule(util.as_dict(tc['schedule']))
    if 'rate' in tc:
        # Rate limiting is expressed as tokens-per-interval plus capacity.
        tc['rate'] = {
            'rate': float(tc['rate'].strip()),
            'capacity': float(tc['capacity'])
        }
    cls.configure(**tc)
    return cls
def get_init(global_config, settings, init_cls=None) -> Initializer:
    """Create the Initializer instance for a WSGI-like app.

    TODO: Deprecated. Use Pyramid's ``bootstrap()`` instead.

    The initializer is taken from the ``websauna.init`` setting (unless
    given explicitly), resolved from its dotted name and instantiated with
    the supplied configuration.

    Example 1::

        config_uri = argv[1]
        init = get_init(dict(__file__=config_uri), settings)

    :param global_config: Global config dictionary, having __file__ entry as given by Paster
    :param settings: Settings dictionary
    :param init_cls: Explicitly give the Initializer class to use, otherwise read ``websauna.init`` settings.
    """
    assert "websauna.init" in settings, "You must have websauna.init setting pointing to your Initializer class"
    assert "__file__" in global_config

    if not init_cls:
        init_cls = settings.get("websauna.init")
    if not init_cls:
        raise RuntimeError("INI file lacks websauna.init option")

    # Resolve the dotted name to the actual class object and instantiate.
    init_cls = DottedNameResolver().resolve(init_cls)
    return init_cls(global_config, settings)
def configure_mailer(self, settings):
    """Configure outgoing email backend based on the INI settings."""
    settings = settings.copy()

    # Mailer backends do not handle empty strings gracefully, so turn
    # empty credentials into None before passing them on.
    for key in ("mail.username", "mail.password"):
        if settings.get(key, "x") == "":
            settings[key] = None

    mailer_class = settings.get("websauna.mailer", "")
    if mailer_class in ("mail", ""):
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default: real SMTP backend built from the settings.
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
    else:
        # Debug/test backend resolved from its dotted name.
        mailer = DottedNameResolver().resolve(mailer_class)()
    self.config.registry.registerUtility(mailer, IMailer)
def configure_mailer(self):
    """Configure outgoing email backend and email test views."""
    from pyramid_mailer import IMailer

    settings = self.settings.copy()

    # Mailer backends do not handle empty strings gracefully, so turn
    # empty credentials into None before passing them on.
    for key in ("mail.username", "mail.password"):
        if settings.get(key, "x") == "":
            settings[key] = None

    mailer_class = settings.get("websauna.mailer", "")
    if mailer_class in ("mail", ""):
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default: real SMTP backend built from the settings.
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
    else:
        # Debug/test backend resolved from its dotted name.
        mailer = DottedNameResolver().resolve(mailer_class)()
    self.config.registry.registerUtility(mailer, IMailer)

    # Optionally expose the sample HTML email views for visual testing.
    if settings.get("websauna.sample_html_email", False):
        from websauna.system.mail import views
        self.config.scan(views)
        self.config.add_jinja2_search_path('websauna.system:mail/templates', name='.html')
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Example:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>

        {{ debug() }}

    This will invoke function from :ref:`websauna.template_debugger`
    setting. The debugger is turned on only on :ref:`development.ini`.
    If there is no debugger configured, nothing happens.
    """
    dotted = settings.get("websauna.template_debugger")
    if not dotted:
        # No debugger configured: the template hook renders nothing.
        def _dummy():
            return ""
        return _dummy()

    debugger = DottedNameResolver().resolve(dotted)
    assert debugger, "Could not find debugger in websauna.template_debugger setting: {}".format(dotted)

    def _inner():
        debugger()
        return ""

    return _inner
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Example:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>

        {{ debug() }}

    This will invoke function from :ref:`websauna.template_debugger`
    setting. The debugger is turned on only on :ref:`development.ini`.
    If there is no debugger configured, a warning is given.
    """
    def _dummy():
        logger.warn(
            "{{ debug() }} invoked, but websauna.template_debugger not set")
        return ""

    dotted = settings.get("websauna.template_debugger")
    if dotted:
        debugger = DottedNameResolver().resolve(dotted)
    else:
        # Fall back to a stub that only logs a warning when invoked.
        debugger = _dummy

    def _inner():
        debugger()
        return ""

    return _inner
def includeme(config): """Bind to the db engine specifed in ``config.registry.settings``. Setup:: >>> from mock import Mock >>> import pyramid_basemodel >>> _engine_from_config = pyramid_basemodel.engine_from_config >>> _bind_engine = pyramid_basemodel.bind_engine >>> pyramid_basemodel.engine_from_config = Mock() >>> pyramid_basemodel.engine_from_config.return_value = 'engine' >>> pyramid_basemodel.bind_engine = Mock() >>> mock_config = Mock() >>> mock_config.registry.settings = {} Calls ``bind_engine`` with the configured ``engine``:: >>> includeme(mock_config) >>> >>> mock_config.action.assert_called_with(None, ... pyramid_basemodel.bind_engine, ... ('engine',), ... {'should_create': False, 'should_drop': False}) Unless told not to:: >>> pyramid_basemodel.bind_engine = Mock() >>> mock_config = Mock() >>> mock_config.registry.settings = {'basemodel.should_bind_engine': False} >>> includeme(mock_config) >>> mock_config.action.called False Teardown:: >>> pyramid_basemodel.engine_from_config = _engine_from_config >>> pyramid_basemodel.bind_engine = _bind_engine """ # Bind the engine. settings = config.get_settings() engine_kwargs_factory = settings.pop('sqlalchemy.engine_kwargs_factory', None) if engine_kwargs_factory: kwargs_factory = config.maybe_dotted(engine_kwargs_factory) engine_kwargs = kwargs_factory(config.registry) else: engine_kwargs = {} pool_class = settings.pop('sqlalchemy.pool_class', None) if pool_class: dotted_name = DottedNameResolver() engine_kwargs['poolclass'] = dotted_name.resolve(pool_class) should_bind = asbool(settings.get('basemodel.should_bind_engine', True)) should_create = asbool(settings.get('basemodel.should_create_all', False)) should_drop = asbool(settings.get('basemodel.should_drop_all', False)) if should_bind: engine = engine_from_config(settings, 'sqlalchemy.', **engine_kwargs) config.action(None, bind_engine, (engine,), { 'should_create': should_create, 'should_drop': should_drop})
def run(self): """Run the initialization and prepare Pyramid subsystems. This is the main entry for ramping up a Websauna application. We go through various subsystem inits. """ # TODO: Remove passing settings to methods as an argument settings = self.settings self.secrets = self.read_secrets() self.configure_logging(settings) # Serving self.configure_templates() self.configure_static() # Forms self.configure_forms() self.configure_crud(settings) # Email self.configure_mailer(settings) # Timed tasks self.configure_scheduler(settings) self.configure_tasks(settings) # Core view and layout related self.configure_root() self.configure_error_views() self.configure_views() self.configure_panels(settings) self.configure_sitemap(settings) # Website administration self.configure_admin(settings) # Sessions and users self.configure_sessions(settings, self.secrets) self.configure_user(settings, self.secrets) self.configure_model_admins() self.configure_notebook() # Configure addons before anything else, so we can override bits from addon, like template lookup paths, later easily self.configure_addons() # Database # This must be run before configure_database() because SQLAlchemy will resolve @declared_attr and we must have config present by then self.configure_instrumented_models() self.configure_database() # Tests can pass us some extra initialization work on ad hoc extra_init = self.global_config.get("extra_init") if extra_init: resolver = DottedNameResolver() extra_init = resolver.resolve(extra_init) extra_init(self)
def includeme(config):
    """ Let extdirect be included by config.include(). """
    settings = config.registry.settings

    option_names = ("api_path", "router_path", "namespace", "descriptor",
                    "expose_exceptions", "debug_mode", "json_encoder")
    extdirect_config = {}
    for option in option_names:
        raw = settings.get("pyramid_extdirect.{}".format(option), None)
        if option in ("expose_exceptions", "debug_mode"):
            # Boolean flags: anything but the literal "true" counts as False.
            raw = (raw == "true")
        if option == "json_encoder" and raw:
            # The encoder is configured as a dotted name; resolve it.
            from pyramid.path import DottedNameResolver
            raw = DottedNameResolver().resolve(raw)
        if raw is not None:
            extdirect_config[option] = raw

    extd = Extdirect(**extdirect_config)
    config.registry.registerUtility(extd, IExtdirect)

    # Wire up the API descriptor and router endpoints with their
    # optionally configured permissions.
    config.add_route('extapi', extd.api_path)
    config.add_view(api_view, route_name='extapi',
                    permission=settings.get("pyramid_extdirect.api_view_permission"))
    config.add_route('extrouter', extd.router_path)
    config.add_view(router_view, route_name='extrouter',
                    permission=settings.get("pyramid_extdirect.router_view_permission"))
def configure_mailer(self):
    """Configure outgoing email backend and email test views.

    Chooses between the default pyramid_mailer SMTP backend and a
    debug backend given as a dotted name in ``websauna.mailer``.
    """
    from pyramid_mailer import IMailer
    settings = self.settings.copy()
    # Empty values are not handled gracefully, so mutate them here before
    # passing forward to mailer
    if settings.get("mail.username", "x") == "":
        settings["mail.username"] = None
    if settings.get("mail.password", "x") == "":
        settings["mail.password"] = None
    mailer_class = settings.get("websauna.mailer", "")
    if mailer_class in ("mail", ""):
        # TODO: Make mailer_class explicit so we can dynamically load pyramid_mail.Mailer
        # Default
        from pyramid_mailer import mailer_factory_from_settings
        mailer = mailer_factory_from_settings(settings)
        self.config.registry.registerUtility(mailer, IMailer)
    else:
        # debug backend
        resolver = DottedNameResolver()
        mailer_cls = resolver.resolve(mailer_class)
        mailer = mailer_cls()
        self.config.registry.registerUtility(mailer, IMailer)
    # Optionally expose sample HTML email views for visual testing.
    if settings.get("websauna.sample_html_email", False):
        from websauna.system.mail import views
        self.config.scan(views)
        self.config.add_jinja2_search_path('websauna.system:mail/templates', name='.html')
def debug(request, registry, settings):
    """Invoke pdb breakpoint from a template.

    Example:

    .. code-block:: html+jinja

        <h1>{{ site_name }}</h1>

        {{ debug() }}

    This will invoke function from :ref:`websauna.template_debugger`
    setting. The debugger is turned on only on :ref:`development.ini`.
    If there is no debugger configured, a warning is given.
    """
    def _dummy():
        # Fallback used when no debugger is configured: warn and render
        # nothing.
        logger.warn("{{ debug() }} invoked, but websauna.template_debugger not set")
        return ""

    template_debugger = settings.get("websauna.template_debugger")
    if not template_debugger:
        debugger = _dummy
    else:
        # Resolve the configured debugger from its dotted name.
        r = DottedNameResolver()
        debugger = r.resolve(template_debugger)

    def _inner():
        # Trigger the breakpoint, then contribute nothing to the template.
        debugger()
        return ""

    return _inner
def test_app_no_login_real_policy(request, test_app_no_perm):
    """A configured Assembl fixture with permissions and no user logged in"""
    config = testing.setUp(
        registry=test_app_no_perm.app.registry,
        settings=get_config(),
    )
    from ...auth.util import authentication_callback
    from pyramid.authorization import ACLAuthorizationPolicy
    from pyramid.path import DottedNameResolver
    resolver = DottedNameResolver(__package__)
    # Resolve the production authentication policy by dotted name so the
    # test wiring matches the real application.
    auth_policy_name = "assembl.auth.util.UpgradingSessionAuthenticationPolicy"
    auth_policy = resolver.resolve(auth_policy_name)(
        callback=authentication_callback)
    config.set_authorization_policy(ACLAuthorizationPolicy())
    config.set_authentication_policy(auth_policy)
    import transaction
    # ensure default roles and permissions at startup
    from ...models import get_session_maker
    with transaction.manager:
        session = get_session_maker()
        from ...lib.migration import bootstrap_db_data
        bootstrap_db_data(session, False)
    return test_app_no_perm
def get_storage_impl(settings) -> Callable[[Any], Any]:
    """ Get and configure the storage backend wrapper """
    selected = settings.get("pypi.storage", "file")

    # Guard the optional backends whose client libraries may be missing.
    if selected == "azure-blob" and not AZURE_BLOB_IS_AVAILABLE:
        raise ValueError(
            "azure-blob storage backend selected but Azure Blob "
            "Storage is not available. "
            "Please install the azure-storage-blob library by "
            "including the `azure-blob` extra in your pip-install step. "
            "For example: `pip install pypicloud[azure-blob]`")
    if selected == "gcs" and not GCS_IS_AVAILABLE:
        raise ValueError(
            "gcs backend selected but GCS is not available. "
            "Please install the google-cloud-storage library by "
            "including the `gcs` extra in your pip-install step. "
            "For example: `pip install pypicloud[gcs]`")

    # Short aliases map to bundled backends; any other value is assumed
    # to already be a dotted path to a storage class.
    aliases = {
        "azure-blob": "pypicloud.storage.AzureBlobStorage",
        "s3": "pypicloud.storage.S3Storage",
        "cloudfront": "pypicloud.storage.CloudFrontS3Storage",
        "gcs": "pypicloud.storage.GoogleCloudStorage",
        "file": "pypicloud.storage.FileStorage",
    }
    storage_impl = DottedNameResolver(__name__).resolve(
        aliases.get(selected, selected))
    kwargs = storage_impl.configure(settings)
    return partial(storage_impl, **kwargs)
def command(self): ini_file = self.args[-1] # pylint: disable=E1101 # TheLMA setup. config = self.__setup_thelma(ini_file) # Initialize the tool and run it. rsv = DottedNameResolver(None) tool_cls = rsv.resolve(self.__target_class.tool) arg_names = [od[1] for od in self.__target_class.option_defs] # Initializing lazy options. We pass the target class and the # options so the callback has access to them. opts = self.options # pylint: disable=E1101 for arg_name in arg_names: arg_value = getattr(opts, arg_name) if isinstance(arg_value, LazyOptionCallback): arg_value = arg_value.initialize(self.__target_class, opts) setattr(opts, arg_name, arg_value) # We only now do a final loop over the options so that lazy option # callbacks get a chance to set dependent option values. kw = {} for arg_name in arg_names: kw[arg_name] = getattr(opts, arg_name) # Remove options that are for command use only. for opt in self.parser.option_list: if opt.dest in kw and opt.pass_to_tool is False: del kw[opt.dest] tool = tool_cls(**kw) try: tool.run() except: transaction.abort() raise else: if tool.has_errors(): err_msgs = tool.get_messages() msg = 'Errors occurred during the tool run. Error messages:\n' raise RuntimeError(msg + os.linesep.join(err_msgs)) warn_msgs = tool.get_messages(logging_level=logging.WARNING) if warn_msgs \ and not self.options.ignore_warnings: # pylint: disable=E1101 msg = 'Warnings occurred during the tool run. You can ' \ 'repeat the run with the --ignore-warnings switch ' \ 'to force changes to be committed. Warning messages:\n' raise RuntimeError(msg + os.linesep.join(warn_msgs)) try: # This gives the tool command a chance to perform actions after # the tool has run. self.__target_class.finalize(tool, opts) except: transaction.abort() raise else: # Create a report of the run. self.__run_report(tool) # All good - check if we should commit. if not self.options.simulate: # pylint: disable=E1101 transaction.commit() else: transaction.abort() config.end()
def _load_module(config, package, module):
    """Look up the optional ``ROUTE_PREFIX`` constant of ``package.module``.

    Falls back to ``None`` when the module (or the constant) cannot be
    imported.
    """
    try:
        resolver = DottedNameResolver()
        # log.debug('{0}.{1}'.format(package, module))
        prefix = resolver.resolve('{0}.{1}:ROUTE_PREFIX'.format(package, module))
    except ImportError:
        # FIX: the legacy "except ImportError, e:" form is a SyntaxError
        # on Python 3; the bound exception was unused anyway.
        prefix = None
def create_cornice_services(path_prefix=''):
    """Build and return the cornice services, optionally path-prefixed."""
    def path(original_path):  # noqa
        # Prepend the configured prefix to every service path.
        return path_prefix + original_path

    resolver = DottedNameResolver()
    Service.default_filters = []

    # /average
    average = Service('average', path('/average'), renderer='json')
    average.add_view(
        'get',
        resolver.resolve('.views.get_averages'),
        accept='application/json',
        decorator=multiple('.decorators.pretty', ),
        schema=resolver.resolve('.schemas.AverageQuerySchema'),
        permission=None,
        renderer='jsonp')
    return [average]
def _load_module(config, package, module):
    """Look up the optional ``ROUTE_PREFIX`` constant of ``package.module``.

    Falls back to ``None`` when the module (or the constant) cannot be
    imported.
    """
    try:
        resolver = DottedNameResolver()
        # log.debug('{0}.{1}'.format(package, module))
        prefix = resolver.resolve('{0}.{1}:ROUTE_PREFIX'.format(
            package, module))
    except ImportError:
        # FIX: the legacy "except ImportError, e:" form is a SyntaxError
        # on Python 3; the bound exception was unused anyway.
        prefix = None
def includeme(config):
    """ Set up and configure the pypicloud app """
    config.set_root_factory(Root)
    config.include('pyramid_tm')
    config.include('pyramid_beaker')
    config.include('pyramid_duh')
    config.include('pyramid_duh.auth')
    config.include('pypicloud.auth')
    config.include('pypicloud.access')
    settings = config.get_settings()
    config.add_renderer('json', json_renderer)
    # Jinja2 configuration
    settings['jinja2.filters'] = {
        'static_url': 'pyramid_jinja2.filters:static_url_filter',
        'tojson': to_json,
    }
    settings['jinja2.directories'] = ['pypicloud:templates']
    config.include('pyramid_jinja2')
    # BEAKER CONFIGURATION
    settings.setdefault('session.type', 'cookie')
    settings.setdefault('session.httponly', 'true')
    config.set_session_factory(session_factory_from_settings(settings))
    # PYPICLOUD SETTINGS
    config.registry.fallback_url = settings.get('pypi.fallback_url',
                                                'http://pypi.python.org/simple')
    config.registry.use_fallback = asbool(settings.get('pypi.use_fallback',
                                                       True))
    realm = settings.get('pypi.realm', 'pypi')
    config.registry.realm = realm
    # CACHING DATABASE SETTINGS
    # 'sql' and 'redis' are short aliases; anything else is treated as a
    # dotted path to a cache class.
    resolver = DottedNameResolver(__name__)
    dotted_cache = settings.get('pypi.db', 'sql')
    if dotted_cache == 'sql':
        dotted_cache = 'pypicloud.cache.SQLCache'
    elif dotted_cache == 'redis':
        dotted_cache = 'pypicloud.cache.RedisCache'
    cache_impl = resolver.resolve(dotted_cache)
    cache_impl.configure(config)
    cache_impl.reload_if_needed()
    config.add_request_method(cache_impl, name='db', reify=True)
    # Special request methods
    config.add_request_method(_app_url, name='app_url')
    config.add_request_method(lambda x: __version__, name='pypicloud_version',
                              reify=True)
    cache_max_age = int(settings.get('pyramid.cache_max_age', 3600))
    # Version-stamp the static path so browser caches bust on upgrade.
    config.add_static_view(name='static/%s' % __version__,
                           path='pypicloud:static',
                           cache_max_age=cache_max_age)
def deserialize(self, node, cstruct):
    """Deserialize a dotted name into the object it refers to.

    :param node: the colander schema node (used for error reporting)
    :param cstruct: dotted name string, ``colander.null`` or ``None``
    :return: the resolved object, or ``colander.null`` for null input
    :raises Invalid: when the dotted name cannot be imported
    """
    if cstruct is colander.null or cstruct is None:
        return colander.null
    resolver = DottedNameResolver()
    try:
        return resolver.resolve(cstruct)
    except ImportError as e:
        # FIX: chain the original ImportError so the root cause stays
        # visible in tracebacks.
        raise Invalid(node, str(e)) from e
def parse_asset_settings(settings):
    """Parse the asset configuration file referenced by ``assets.config``.

    Resolves the INI file as an asset spec (falling back to a plain path),
    reads the common ``assets`` options plus the ``assets:local`` section
    (resolving asset specs to absolute paths) and, when not storing
    locally, merges in ``assets:cloud`` with its ``service`` resolved from
    a dotted name.

    :param settings: application settings mapping
    :return: dict of parsed asset options
    """
    # FIX: SafeConfigParser was a deprecated alias removed in Python 3.12.
    config = configparser.ConfigParser()
    asset_resolver = AssetResolver()
    dotted_resolver = DottedNameResolver()
    asset_config = settings.get('assets.config')
    # FIX: was a Python 2 print statement, a SyntaxError on Python 3.
    print(asset_config)
    try:
        s = asset_resolver.resolve(asset_config).abspath()
        config.read(s)
    except AttributeError:
        raise
    try:
        store_locally = config.getboolean('assets', 'store_locally')
    except configparser.NoSectionError:
        # The asset spec did not resolve to a readable file; retry treating
        # the setting as a plain filesystem path.
        try:
            with open(asset_config) as fp:
                # FIX: readfp() was removed in Python 3.12; read_file() is
                # the drop-in replacement.
                config.read_file(fp)
        except IOError:
            raise
        else:
            store_locally = config.getboolean('assets', 'store_locally')
    result = dict(
        store_locally=store_locally,
        tmp_path=asset_resolver.resolve(config.get('assets', 'tmp_path')).abspath(),
        save_path=asset_resolver.resolve(config.get('assets', 'save_path')).abspath()
    )
    c = config.items('assets:local')
    for key, value in c:
        # Skip any urls since they don't need to be resolved
        # TODO: Might produce bugs if module name starts with `http`
        if value.startswith('http'):
            result[key] = value
            continue
        try:
            value = asset_resolver.resolve(value).abspath()
        except ValueError:
            # This gets raised if the name isn't in dotted notation
            pass
        except ImportError:
            # This gets raised if there's ":" in the value but it's not a module
            pass
        finally:
            result[key] = value
    if not store_locally:
        c = dict(config.items('assets:cloud'))
        c['service'] = dotted_resolver.resolve(c.get('service'))
        result.update(c)
    return result
def get_storage_impl(settings):
    """ Get and configure the storage backend wrapper """
    # Short aliases map to bundled backends; any other value is assumed
    # to already be a dotted path to a storage class.
    aliases = {
        's3': 'pypicloud.storage.S3Storage',
        'file': 'pypicloud.storage.FileStorage',
    }
    selected = settings.get('pypi.storage', 'file')
    storage_impl = DottedNameResolver(__name__).resolve(
        aliases.get(selected, selected))
    kwargs = storage_impl.configure(settings)
    return partial(storage_impl, **kwargs)
def get_callbacks(name, scenes, settings):
    """Resolve the per-scene callbacks configured for ``name``.

    Looks up ``<CONFIG_PREFIX>.<name>.callback.<scene>`` for every scene
    and resolves the dotted name to a callable; scenes without a
    (resolvable) callback are omitted from the result.

    :return: dict mapping scene -> resolved callable
    """
    resolver = DottedNameResolver()
    callbacks = {}
    for scene in scenes:
        callback = settings.get(f"{CONFIG_PREFIX}.{name}.callback.{scene}", None)
        # FIX: skip unset callbacks explicitly instead of relying on
        # resolve(None) raising and being swallowed below.
        if callback is None:
            continue
        try:
            callbacks[scene] = resolver.resolve(callback)
        except Exception:
            # Best effort: an unresolvable dotted name leaves the scene
            # without a callback. NOTE(review): consider logging this.
            pass
    return callbacks
def get_cache_impl(settings):
    """Get the cache class from settings"""
    # Short aliases map to bundled caches; any other value is assumed to
    # already be a dotted path to a cache class.
    aliases = {
        "sql": "pypicloud.cache.SQLCache",
        "redis": "pypicloud.cache.RedisCache",
        "dynamo": "pypicloud.cache.dynamo.DynamoCache",
    }
    selected = settings.get("pypi.db", "sql")
    return DottedNameResolver(__name__).resolve(aliases.get(selected, selected))
def get_cache_impl(settings):
    """ Get the cache class from settings """
    selected = settings.get("pypi.db", "sql")
    # Translate the short alias to its dotted path; unknown values are
    # assumed to already be dotted paths to a cache class.
    dotted = {
        "sql": "pypicloud.cache.SQLCache",
        "redis": "pypicloud.cache.RedisCache",
        "dynamo": "pypicloud.cache.dynamo.DynamoCache",
    }.get(selected, selected)
    resolver = DottedNameResolver(__name__)
    return resolver.resolve(dotted)
def get_cache_impl(settings):
    """ Get the cache class from settings """
    # Short aliases map to bundled caches; any other value is assumed to
    # already be a dotted path to a cache class.
    aliases = {
        'sql': 'pypicloud.cache.SQLCache',
        'redis': 'pypicloud.cache.RedisCache',
        'dynamo': 'pypicloud.cache.dynamo.DynamoCache',
    }
    selected = settings.get('pypi.db', 'sql')
    return DottedNameResolver(__name__).resolve(aliases.get(selected, selected))
def get_callbacks(name, scenes, settings):
    """Resolve the configured callback for each scene of ``name``.

    Scenes whose ``<CONFIG_PREFIX>.<name>.callback.<scene>`` setting is
    missing or cannot be resolved are silently left out of the result.
    """
    resolver = DottedNameResolver()
    callbacks = {}
    for scene in scenes:
        setting_key = f"{CONFIG_PREFIX}.{name}.callback.{scene}"
        try:
            callbacks[scene] = resolver.resolve(settings.get(setting_key, None))
        except Exception:
            # Unset or unresolvable callbacks are skipped (best effort).
            pass
    return callbacks
def _add_referencer(registry):
    """
    Gets the Referencer from config and adds it to the registry.
    """
    existing = registry.queryUtility(IReferencer)
    if existing is not None:
        # Already registered: reuse the existing utility.
        return existing
    referencer_name = registry.settings['urireferencer.referencer']
    registry_url = registry.settings['urireferencer.registry_url']
    # The referencer class is configured as a dotted name.
    referencer_cls = DottedNameResolver().resolve(referencer_name)
    registry.registerUtility(referencer_cls(registry_url), IReferencer)
    return registry.queryUtility(IReferencer)
def process(self, real_estate, params, sld_url):
    """
    Central processing method to hook in from webservice.

    Args:
        real_estate (pyramid_oereb.lib.records.real_estate.RealEstateRecord): The real
            estate reader to obtain the real estates record.
        params (pyramid_oereb.views.webservice.Parameter): The parameters of the extract request.
        sld_url (str): The URL which provides the sld to style and filter the highlight of
            the real estate.

    Returns:
        pyramid_oereb.lib.records.extract.ExtractRecord: The generated extract record.
    """
    log.debug("process() start")
    # Read the supporting data for the extract. The municipality lookup is
    # keyed by the real estate's fosnr; only the first match is used.
    municipality = self._municipality_reader_.read(params, real_estate.fosnr)[0]
    exclusions_of_liability = self._exclusion_of_liability_reader_.read(
        params)
    glossaries = self._glossary_reader_.read(params)
    extract_raw = self._extract_reader_.read(params, real_estate, municipality)
    extract = self.plr_tolerance_check(extract_raw)
    # Optionally post-sort the extract via a configurable dotted-name hook.
    resolver = DottedNameResolver()
    sort_within_themes_method_string = Config.get('extract').get(
        'sort_within_themes_method')
    if sort_within_themes_method_string:
        sort_within_themes_method = resolver.resolve(
            sort_within_themes_method_string)
        extract = sort_within_themes_method(extract)
    else:
        log.info(
            "No configuration is provided for extract sort_within_themes_method;"
            " no further sorting is applied.")
    # the selection of view services is done after the tolerance check. This enables us to take
    # care about the circumstance that after tolerance check plrs will be dismissed which were
    # recognized as intersecting before. To avoid this the tolerance check is gathering all plrs
    # intersecting and not intersecting and starts the legend entry sorting after.
    self.view_service_handling(extract.real_estate, params.images, params.format)
    extract.exclusions_of_liability = exclusions_of_liability
    extract.glossaries = glossaries
    # obtain the highlight wms url and its content only if the parameter full was requested (PDF)
    if params.flavour == 'full':
        if Config.get('full_extract_use_sld', True):
            extract.real_estate.set_highlight_url(sld_url)
    log.debug("process() done, returning extract.")
    return extract
def get_storage_impl(settings):
    """ Get and configure the storage backend wrapper """
    aliases = {
        "s3": "pypicloud.storage.S3Storage",
        "cloudfront": "pypicloud.storage.CloudFrontS3Storage",
        "file": "pypicloud.storage.FileStorage",
    }
    requested = settings.get("pypi.storage", "file")
    dotted = aliases.get(requested, requested)
    storage_impl = DottedNameResolver(__name__).resolve(dotted)
    # Bind the backend's configured kwargs so callers get a ready factory.
    kwargs = storage_impl.configure(settings)
    return partial(storage_impl, **kwargs)
def import_module(self):
    """Import the module named on the command line and save its metadata."""
    module_name = self.pargs.module[0]
    self.log.info("Importing module %s" % module_name)
    try:
        module = DottedNameResolver().resolve(module_name)
    except Exception as e:
        self.log.error("Cannot import module: %s" % e)
    else:
        # Only inspect and persist when the import succeeded.
        mod = Module.inspect(module)
        mod.save(self.app.mdb)
        self.log.info("Saved module %s to database" % (mod))
def configure(cls, config):
    """ Configure the cache method with app settings """
    settings = config.get_settings()
    shortcuts = {
        's3': 'pypicloud.storage.S3Storage',
        'file': 'pypicloud.storage.FileStorage',
    }
    choice = settings.get('pypi.storage', 'file')
    storage_impl = DottedNameResolver(__name__).resolve(
        shortcuts.get(choice, choice))
    storage_impl.configure(config)
    # Expose the configured backend and overwrite policy on the class.
    cls.storage_impl = storage_impl
    cls.allow_overwrite = asbool(
        settings.get('pypi.allow_overwrite', False))
def configure(cls, config):
    """Bind the storage backend and the overwrite policy onto the class."""
    settings = config.get_settings()
    resolver = DottedNameResolver(__name__)
    dotted = settings.get('pypi.storage', 'file')
    if dotted == 's3':
        dotted = 'pypicloud.storage.S3Storage'
    elif dotted == 'file':
        dotted = 'pypicloud.storage.FileStorage'
    impl = resolver.resolve(dotted)
    impl.configure(config)
    cls.storage_impl = impl
    cls.allow_overwrite = asbool(settings.get('pypi.allow_overwrite', False))
def get_backend_factory(settings):
    """Instantiate the cache backend factory named in the settings."""
    backend = settings.get('pyramid_walrus.backend', 'redis')
    bundled = {
        'redis': RedisFactory,
        'ledis': LedisFactory,
        'rlite': RLiteFactory,
        'vedis': VedisFactory,
    }
    factory = bundled.get(backend)
    if factory is None:
        # Not one of the bundled backends: resolve it as a dotted path.
        factory = DottedNameResolver().resolve(backend)
    return factory(settings)
def includeme(config):
    """ Configure the app: auth routes, auth DB backend and cookie policy. """
    settings = config.get_settings()
    name_resolver = DottedNameResolver(__package__)
    config.set_root_factory(Root)
    add_acl_from_settings(config)
    # Auth endpoints are always reachable, even without a session.
    config.add_route('auth', '/auth')
    config.add_view('steward.views.do_auth', route_name='auth',
                    renderer='json', permission=NO_PERMISSION_REQUIRED)
    config.add_route('check_auth', '/check_auth')
    config.add_view('steward.views.do_check_auth', route_name='check_auth',
                    renderer='json', permission=NO_PERMISSION_REQUIRED)
    if not asbool(settings.get('steward.auth.enable')):
        # Auth disabled: install the dummy DB and skip policy setup entirely.
        config.registry.auth_db = DummyAuthDB(config)
        return
    # Map the short source names to their dotted implementations; a *.yaml
    # value selects the YAML-backed DB, anything else is used as-is.
    auth_db_source = settings.get('steward.auth.db', 'settings')
    if auth_db_source == 'settings':
        auth_db_source = 'steward.auth.SettingsAuthDB'
    elif auth_db_source.endswith('.yaml'):
        auth_db_source = 'steward.auth.YamlAuthDB'
    auth_db = name_resolver.resolve(auth_db_source)(config)
    config.registry.auth_db = auth_db
    # NOTE(review): both set_authentication_policy (with a registry-provided
    # policy) and add_authentication_policy (cookie policy below) are called
    # here — confirm this double registration is intentional.
    config.set_authentication_policy(config.registry.authentication_policy)
    config.set_authorization_policy(ACLAuthorizationPolicy())
    # Cookie-based auth-ticket policy; every knob is settings-driven.
    auth_policy = AuthTktAuthenticationPolicy(
        settings['steward.cookie.secret'],
        callback=auth_db.groups,
        cookie_name=settings.get('steward.cookie.name', 'auth_tkt'),
        secure=asbool(settings.get('steward.cookie.secure')),
        timeout=asint(settings.get('steward.cookie.timeout')),
        reissue_time=asint(settings.get('steward.cookie.reissue_time')),
        max_age=asint(settings.get('steward.cookie.max_age')),
        path=settings.get('steward.cookie.path', '/'),
        http_only=asbool(settings.get('steward.cookie.httponly', True)),
        wild_domain=asbool(settings.get('steward.cookie.wild_domain', True)),
        hashalg=settings.get('steward.cookie.hashalg', 'sha512'),
        debug=asbool(settings.get('steward.cookie.debug', False)),
    )
    config.add_authentication_policy(auth_policy)
    config.set_default_permission('default')
    config.add_request_method(unauthenticated_userid, name='userid', reify=True)
def _resolve_children(cls):
    """Resolve string entries in ``cls.__children__`` to real objects.

    Each string value is first looked up as an attribute of the class's
    defining module; if absent there, it is resolved as a dotted name
    relative to that module. Must run at most once per class, and never
    on the ``Resource`` base itself.
    """
    assert not cls._children_resolved
    # Identity comparison is the correct check for "not the base class".
    assert cls is not Resource
    module = import_module(cls.__module__)
    resolver = DottedNameResolver(module)
    to_update = {}
    for key, val in cls.__children__.items():
        if isinstance(val, str):
            try:
                to_update[key] = getattr(module, val)
            except AttributeError:
                # Not a plain module attribute; treat it as a dotted path.
                to_update[key] = resolver.resolve(val)
    cls.__children__.update(to_update)
    cls._children_resolved = True
def get_image(self):
    """
    Returns a response containing the binary image content using the
    configured "get_symbol_method".

    Returns:
        pyramid.response.Response: Response containing the binary image content.
    """
    resolver = DottedNameResolver()
    # Case-insensitive match of the requested theme code (invariant, so
    # computed once outside the loop).
    wanted = str(self._request_.matchdict.get('theme_code')).lower()
    for plr in Config.get('plrs'):
        if str(plr.get('code')).lower() == wanted:
            handler = resolver.resolve(plr.get('hooks').get('get_symbol'))
            if handler:
                return handler(self._request_)
            break
    log.error('"get_symbol_method" not found')
    raise HTTPNotFound()
def _register_request_skos_registry(request):
    '''
    Get the :class:`skosprovider.registry.Registry` attached to this request.

    :param request: The Pyramid request
    :rtype: :class:`skosprovider.registry.Registry`
    '''
    settings = _parse_settings(request.registry.settings)
    if 'skosregistry_factory' in settings:
        # A custom factory is configured: resolve it and call it with the
        # request.
        factory = DottedNameResolver().resolve(settings['skosregistry_factory'])
        return factory(request)
    # No factory configured: fall back to a thread-scoped default registry.
    return Registry(instance_scope='threaded_thread')
def includeme(config):
    """ Get and configure the cache db wrapper """
    settings = config.get_settings()
    shortcuts = {
        'sql': 'pypicloud.cache.SQLCache',
        'redis': 'pypicloud.cache.RedisCache',
        'dynamo': 'pypicloud.cache.dynamo.DynamoCache',
    }
    choice = settings.get('pypi.db', 'sql')
    resolver = DottedNameResolver(__name__)
    cache_impl = resolver.resolve(shortcuts.get(choice, choice))
    kwargs = cache_impl.configure(settings)
    # Instantiate once at configure time and trigger the reload check.
    cache = cache_impl(**kwargs)
    cache.reload_if_needed()
    # Each request gets its own instance lazily via request.db.
    config.add_request_method(partial(cache_impl, **kwargs), name='db',
                              reify=True)
    return cache_impl
def ResolveName(name, base=None, raiseExcp=True):
    """
    Lookup python object by dotted python name. Wraps pyramid.DottedNameResolver.
    returns object or None

    Args:
        name: Dotted name string, an already-resolved object, or empty.
        base: Package to resolve relative names against; defaults to the
            caller's package.
        raiseExcp: When false, uses ``maybe_resolve`` instead of ``resolve``.
    """
    if not name:
        return None
    if not isinstance(name, basestring):
        # Already an object; nothing to resolve.
        return name
    if not base:
        base = caller_package()
    # Build the resolver once instead of duplicating it in both branches.
    d = DottedNameResolver(base)
    if not raiseExcp:
        return d.maybe_resolve(name)
    return d.resolve(name)
def __init__(self, ini_file):
    # NOTE: Python 2 source (print statement below).
    # Loads the ini file, sets up logging and registers every console
    # command listed (newline-separated dotted names) under the
    # 'console_commands' setting.
    self.ini_file = ini_file
    self.settings = get_appsettings(ini_file)
    pyramid.paster.setup_logging(ini_file)
    self.commands = {}
    cmd_paths = self.settings.get('console_commands', '')
    if not cmd_paths:
        # Warn only; the empty string still flows through split() below and
        # yields no entries.
        print "No commands defined in your ini-file."
    cmd_paths = cmd_paths.split('\n')
    r = DottedNameResolver()
    # Blank lines are skipped; each remaining path is resolved to an object.
    cmd_entries = [r.resolve(p.strip()) for p in cmd_paths if p.strip()]
    for entry in cmd_entries:
        if inspect.ismodule(entry):
            # A module contributes every valid command it contains.
            for m in inspect.getmembers(entry, _valid_command):
                self._register_command(m[1])
        elif _valid_command(entry):
            self._register_command(entry)
        else:
            raise TypeError("Command must be a module or a subclass of 'pyramid_command.Command' class")
def _register_global_skos_registry(registry):
    '''
    Build a :class:`skosprovider.registry.Registry` and attach it to the
    Pyramid registry.

    :param registry: The Pyramid registry
    :rtype: :class:`skosprovider.registry.Registry`
    '''
    settings = _parse_settings(registry.settings)
    if 'skosregistry_factory' in settings:
        # A custom factory is configured; it is called with no arguments.
        build = DottedNameResolver().resolve(settings['skosregistry_factory'])
        skos_registry = build()
    else:
        skos_registry = Registry(instance_scope='threaded_global')
    registry.registerUtility(skos_registry, ISkosRegistry)
    return registry.queryUtility(ISkosRegistry)
def load_providers(config):
    """Create and register an OAuth2 Provider for each enabled client."""
    resolver = DottedNameResolver()
    config.registry.oauth2_providers = dict()
    settings = config.registry.settings
    enabled = config.registry.settings.get(ENABLED_CLIENTS)
    for client in enabled.split(','):
        callback = settings.get(CALLBACK % client)
        if callback:
            # Callbacks are configured as dotted names; import them now.
            callback = resolver.resolve(callback)
        # Provider(name, client_id, secret, authorize_url, access_token_url,
        # callback=None, **kargs)
        provider = Provider(
            client,
            settings.get(CLIENT_ID % client),
            settings.get(SECRET % client),
            settings.get(AUTHORIZE_ENDPOINT % client),
            settings.get(TOKEN_ENDPOINT % client),
            callback,
            scope=settings.get(SCOPE % client),
        )
        config.add_oauth2_provider(provider)
def load_providers(config):
    '''
    Load OAuth2 providers from the config files.

    Providers must be enabled via ``oauth2.clients = ...`` and each one
    configured like this example:

    ============
    oauth2.clients = facebook
    oauth2.facebook.client_id = client_id
    oauth2.facebook.secret = secret_text
    oauth2.facebook.authorize_endpoint = authorization url
    oauth2.facebook.token_endpoint = token url
    oauth2.facebook.scope = scope you want to use
    oauth2.facebook.callback = dotted.string.to.callback:func
    '''
    resolver = DottedNameResolver()
    config.registry.oauth2_providers = dict()
    settings = config.registry.settings
    clients = config.registry.settings.get(ENABLED_CLIENTS)
    for client in clients.split(','):
        client_id = settings.get(CLIENT_ID % client)
        secret = settings.get(SECRET % client)
        authorize_url = settings.get(AUTHORIZE_ENDPOINT % client)
        token_url = settings.get(TOKEN_ENDPOINT % client)
        scope = settings.get(SCOPE % client)
        dotted_callback = settings.get(CALLBACK % client)
        # Resolve the callback only when one is configured.
        callback = (resolver.resolve(dotted_callback)
                    if dotted_callback else dotted_callback)
        config.add_oauth2_provider(
            Provider(client, client_id, secret, authorize_url, token_url,
                     callback, scope=scope))
def includeme(config): """Bind to the db engine specifed in ``config.registry.settings``. Setup:: >>> from mock import Mock >>> import pyramid_basemodel >>> _engine_from_config = pyramid_basemodel.engine_from_config >>> _bind_engine = pyramid_basemodel.bind_engine >>> pyramid_basemodel.engine_from_config = Mock() >>> pyramid_basemodel.engine_from_config.return_value = 'engine' >>> pyramid_basemodel.bind_engine = Mock() >>> mock_config = Mock() >>> mock_config.registry.settings = {} Calls ``bind_engine`` with the configured ``engine``:: >>> includeme(mock_config) >>> pyramid_basemodel.bind_engine.assert_called_with('engine', ... should_create=True, should_drop=False) Teardown:: >>> pyramid_basemodel.engine_from_config = _engine_from_config >>> pyramid_basemodel.bind_engine = _bind_engine """ # Bind the engine. settings = config.registry.settings engine_kwargs = {} pool_class = settings.pop('sqlalchemy.pool_class', None) if pool_class: dotted_name = DottedNameResolver() engine_kwargs['poolclass'] = dotted_name.resolve(pool_class) engine = engine_from_config(settings, 'sqlalchemy.', **engine_kwargs) should_create = asbool(settings.get('basemodel.should_create_all', True)) should_drop = asbool(settings.get('basemodel.should_drop_all', False)) bind_engine(engine, should_create=should_create, should_drop=should_drop)