async def add_initialized(event):
    """Create dynamic content and behavior factories once the app initializes.

    Reads behavior/content definitions from ``app_settings``, registers a
    factory for each, reloads the configuration, then verifies that every
    registration actually took effect.
    """
    type_names = []
    behaviors = []
    for type_name, definition in app_settings.get("behaviors", {}).items():
        create_behaviors_factory(type_name, definition)
        behaviors.append(type_name)
    reload_behavior_configuration()
    for type_name, definition in app_settings.get("contents", {}).items():
        create_content_factory(type_name, definition)
        type_names.append(type_name)
    reload_content_configuration()
    for type_name in type_names:
        # Verify it's created: drop any stale cached factory and re-resolve
        if type_name in FACTORY_CACHE:
            del FACTORY_CACHE[type_name]
        get_cached_factory(type_name)
    for proto_name in behaviors:
        # Verify it's created
        interface_name = "guillotina.contrib.dyncontent.interfaces.I" + proto_name
        utility = get_utility(IBehavior, name=interface_name)
        class_interface = import_class(interface_name)
        assert BEHAVIOR_CACHE[interface_name].__identifier__ == interface_name
        # Bug fix: the comparison result was previously discarded (no-op
        # expression statement); assert it like the check above.
        assert utility.interface == class_interface
async def generate_validation_token(data, ttl=3660):
    """Build an encrypted JWE token carrying *data* plus iat/exp claims.

    :param data: extra claims to embed (may be None)
    :param ttl: token lifetime in seconds
    :return: ``(token, expiry_string)`` where the expiry string is formatted
             per ``app_settings['datetime_format']`` (ISO-8601 when unset)
             in the configured default timezone.
    """
    data = data or {}
    # Bug fix: time.time() was previously called three separate times, so the
    # "exp" claim and the returned expiry string could disagree. Take one
    # timestamp and derive everything from it.
    now = time.time()
    expires = now + ttl
    claims = {
        "iat": int(now),
        "exp": int(expires),
    }
    claims.update(data)
    payload = orjson.dumps(claims)
    jwetoken = jwe.JWE(payload, json_encode({"alg": "A256KW", "enc": "A256CBC-HS512"}))
    jwetoken.add_recipient(get_jwk_key())
    token = jwetoken.serialize(compact=True)
    datetime_format = app_settings.get("datetime_format")
    default_timezone = app_settings.get("default_timezone", "UTC")
    tz = pytz.timezone(default_timezone)
    if datetime_format is None:
        last_date = datetime.fromtimestamp(expires, tz=tz).isoformat()
    else:
        last_date = datetime.fromtimestamp(expires, tz=tz).strftime(datetime_format)
    return token, last_date
def get_allow_discussion(self):
    """Return whether discussion is allowed for this content object.

    Types not listed in ``allow_discussion_types`` never allow discussion;
    otherwise an explicit per-object setting wins over the global default.
    """
    allowed_types = app_settings.get("allow_discussion_types", [])
    if self.context.type_name not in allowed_types:
        return False
    if self._allow_discussion is not None:
        return self._allow_discussion
    return app_settings.get("default_allow_discussion")
async def __call__(self):
    """Handle a user self-registration request.

    Rejects the call when registration is disabled, the recaptcha check
    fails, or the requested user id already exists; otherwise kicks off
    the validation flow (email confirmation) for the new account.
    """
    if app_settings.get("allow_register", False) is False:
        raise HTTPUnauthorized(content={"text": "Not allowed registration"})
    validator = RecaptchaValidator()
    if await validator.validate() is False:
        raise HTTPUnauthorized(content={"text": "Invalid validation"})
    payload = await self.request.json()
    existing = await find_user({"id": payload.get("id", None)})
    if existing is not None:
        # don't let callers claim an id that is already taken
        raise HTTPUnauthorized(content={"text": "Invalid login"})
    validation_utility = get_utility(IAuthValidationUtility)
    if validation_utility is None:
        raise HTTPNotAcceptable()
    username = payload.get("fullname", payload.get("id", ""))
    actual_user = get_authenticated_user()
    await validation_utility.start(
        as_user=payload.get("id"),
        from_user=actual_user.id,
        task_description=f"Registering user {username}",
        task_id="register_user",
        email=payload.get("email"),
        context_description=self.context.title,
        redirect_url=self.request.query.get("redirect_url", None),
        data=payload,
    )
def update_app_settings(settings):
    """Merge *settings* into the global ``app_settings``.

    Dict values are merged one level deep into any existing dict value;
    everything else simply overwrites the previous entry.
    """
    for key, value in settings.items():
        current = app_settings.get(key)
        if isinstance(current, dict) and isinstance(value, dict):
            current.update(value)
        else:
            app_settings[key] = value
async def test_calculate_mapping_diff(es_requester):
    # Verifies Migrator.calculate_mapping_diff detects per-type mapping
    # changes between the live mappings and a freshly created "next" index.
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        version, new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name
        mappings = get_mappings()
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))
        # tweak mappings so we can get the diff...
        for key, value in mappings.items():
            # need to modify on *all* or it won't work with ES..
            if 'creators' in value['properties']:
                value['properties']['creators']['type'] = 'text'
        mappings['Item']['properties']['foobar'] = {
            'type': 'keyword',
            'index': True
        }
        # index settings/mappings can only be replaced while the index is closed
        await search.conn.indices.close(new_index_name)
        await search.conn.indices.put_settings(index_settings, new_index_name)
        for key, value in mappings.items():
            await search.conn.indices.put_mapping(new_index_name, key, value)
        await search.conn.indices.open(new_index_name)
        diff = await migrator.calculate_mapping_diff()
        # one changed field for Folder (creators); two for Item (creators + foobar)
        assert len(diff['Folder']) == 1
        assert len(diff['Item']) == 2
def get_mappings():
    """Build the elasticsearch mapping from all registered content types.

    Field mappings come from each type's index configuration, with
    ``mapping_overrides`` applied: the ``'*'`` entry applies to every type
    and per-type entries layer on top of it.
    """
    from guillotina import app_settings
    mapping_overrides = app_settings.get('elasticsearch', {}).get('mapping_overrides', {})
    wildcard_overrides = mapping_overrides.get('*', {})
    properties = {}
    for name, _ in get_utilities_for(IResourceFactory):
        # For each type, combine wildcard and per-type overrides
        type_overrides = wildcard_overrides.copy()
        type_overrides.update(mapping_overrides.get(name, {}))
        for field_name, catalog_info in get_index_fields(name).items():
            index_name = catalog_info.get('index_name', field_name)
            catalog_type = catalog_info.get('type', 'text')
            field_mapping = catalog_info.get('field_mapping', None)
            if field_mapping is None:
                field_mapping = CATALOG_TYPES[catalog_type].copy()
            if 'store' in catalog_info:
                field_mapping['store'] = catalog_info['store']
            if index_name in type_overrides:
                # an override replaces the computed mapping wholesale
                field_mapping = type_overrides[index_name]
            properties[index_name] = field_mapping
    return {
        'properties': properties,
        'dynamic': False,
        '_all': {'enabled': False}
    }
async def synchronize(self, keys_to_publish):
    """Publish cache invalidations (and optionally pushed object state) on redis.

    :param keys_to_publish: cache keys other workers should invalidate
    :raises NoPubSubUtility: when no pubsub subscriber is configured
    :raises NoChannelConfigured: when cache.updates_channel is unset
    """
    if self._utility._subscriber is None:  # pragma: no cover
        raise NoPubSubUtility()
    if app_settings.get("cache", {}).get("updates_channel", None) is None:  # pragma: no cover
        raise NoChannelConfigured()
    push = {}
    if self.push_enabled:
        for obj, pickled in self._stored_objects:
            val = {"state": pickled, "zoid": obj.__uuid__, "tid": obj.__serial__, "id": obj.__name__}
            if obj.__of__:
                # annotation objects are keyed off the object they belong to
                ob_key = self.get_key(oid=obj.__of__, id=obj.__name__, variant="annotation")
            else:
                if obj.__parent__:
                    ob_key = self.get_key(container=obj.__parent__, id=obj.__name__)
                else:
                    ob_key = self.get_key(oid=obj.__uuid__)
            push[ob_key] = val
    self._stored_objects.clear()
    # ignore our own message when it comes back over the channel
    self._utility.ignore_tid(self._transaction._tid)
    await self._utility._subscriber.publish(
        app_settings["cache"]["updates_channel"],
        self._transaction._tid,
        {"tid": self._transaction._tid, "keys": keys_to_publish, "push": push},
    )
async def __call__(self):
    """Serve the GraphQL playground page when enabled; 404 otherwise."""
    enabled = app_settings.get("graphql", {}).get("enable_playground")
    if enabled is not True:
        raise HTTPNotFound()
    return Response(
        content=PLAYGROUND_HTML,
        headers={"content-type": "text/html"},
    )
async def initialize(self, app=None):
    """Install pg catalog SQL functions and json indexes on every postgres db.

    Skipped entirely when disabled via environment/setting or when json
    storage is turned off. Connection errors during shutdown are tolerated.
    """
    from guillotina import app_settings

    if os.environ.get("SKIP_PGCATALOG_INIT") or app_settings.get("skip_pgcatalog_init", False):
        return
    if not app_settings["store_json"]:
        return
    root = get_utility(IApplication, name="root")
    for _id, db in root:
        if not IDatabase.providedBy(db):
            continue
        tm = db.get_transaction_manager()
        if not IPostgresStorage.providedBy(tm.storage):
            continue
        try:
            async with tm.storage.pool.acquire() as conn:
                for func in PG_FUNCTIONS:
                    await conn.execute(func)
                # container_id is always indexed, plus any registered custom indexes
                for index in [BasicJsonIndex("container_id")] + [v for v in get_pg_indexes().values()]:
                    sqls = index.get_index_sql(tm.storage)
                    for sql in sqls:
                        logger.debug(f"Creating index:\n {sql}")
                        await conn.execute(sql)
        except asyncpg.exceptions.ConnectionDoesNotExistError:  # pragma: no cover
            # closed before it could be setup
            pass
        except AttributeError as ex:  # pragma: no cover
            if "'reset'" in str(ex):
                # ignore error removing from pool if already closed
                return
            raise
async def test_calculate_mapping_diff(es_requester):
    # Same diff test against the single-doc-type (DOC_TYPE) ES API: both
    # tweaked fields land under one doc type, so the diff has two entries.
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        version, new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name
        mappings = get_mappings()
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))
        # tweak mappings so we can get the diff...
        if 'creators' in mappings['properties']:
            mappings['properties']['creators']['type'] = 'text'
        mappings['properties']['foobar'] = {'type': 'keyword', 'index': True}
        # settings/mappings can only be replaced while the index is closed
        await search.conn.indices.close(new_index_name)
        await search.conn.indices.put_settings(body=index_settings, index=new_index_name)
        await search.conn.indices.put_mapping(index=new_index_name, doc_type=DOC_TYPE, body=mappings)
        await search.conn.indices.open(new_index_name)
        diff = await migrator.calculate_mapping_diff()
        assert len(diff[DOC_TYPE]) == 2
def load_service(_context, service):
    """Register one @configure.service definition as a view adapter.

    Protects the view with its configured (or default) permission, registers
    the factory for (context, layer) under the HTTP method interface, and
    records the service in the ``api_definition`` documentation tree.
    """
    # prevent circular import
    from guillotina import app_settings
    from guillotina.security.utils import protect_view

    service_conf = service['config']
    factory = resolve_dotted_name(service['klass'])

    permission = service_conf.get(
        'permission', app_settings.get('default_permission', None))
    protect_view(factory, permission)

    method = service_conf.get('method', 'GET')
    default_layer = resolve_dotted_name(
        app_settings.get('default_layer', IDefaultLayer))
    layer = service_conf.get('layer', default_layer)
    name = service_conf.get('name', '')
    content = service_conf.get('context', Interface)
    logger.debug('Defining adapter for '  # noqa
                 '{0:s} {1:s} {2:s} to {3:s} name {4:s}'.format(
                     content.__identifier__,
                     app_settings['http_methods'][method].__identifier__,
                     layer.__identifier__,
                     str(factory),
                     name))
    # the HTTP method maps to a provided interface the traverser looks up
    component.adapter(
        _context,
        factory=(factory,),
        provides=app_settings['http_methods'][method],
        for_=(content, layer),
        name=name
    )

    # record the endpoint in the API documentation structure, nested as
    # api[content type][('endpoints'][name])][method] = service config
    api = app_settings['api_definition']
    ct_name = content.__identifier__
    if ct_name not in api:
        api[ct_name] = OrderedDict()
    ct_api = api[ct_name]
    if name:
        if 'endpoints' not in ct_api:
            ct_api['endpoints'] = OrderedDict()
        if name not in ct_api['endpoints']:
            ct_api['endpoints'][name] = OrderedDict()
        ct_api['endpoints'][name][method] = OrderedDict(service_conf)
    else:
        ct_api[method] = OrderedDict(service_conf)
def __call__(self):
    """Return the configured default blocks for this content type, or {}."""
    configured = app_settings.get("default_blocks", None)
    type_name = self.context.type_name
    if configured is None or type_name not in configured:
        return {}
    return configured[type_name].get("blocks", {})
def guillotina(db, guillotina_main, loop):
    """Pytest fixture: run a test server and yield a requester bound to it.

    The server is started before the test and closed afterwards.
    """
    test_server = TestServer(
        guillotina_main, **app_settings.get("test_server_settings", {}))
    loop.run_until_complete(test_server.start_server(loop=loop))
    yield GuillotinaDBRequester(server=test_server, loop=loop)
    # teardown: stop the server once the test is done with the requester
    loop.run_until_complete(test_server.close())
def get(self, loop=None):
    """Lazily create and return the shared Elasticsearch connection."""
    if self._conn is None:
        conn_settings = app_settings.get("elasticsearch", {}).get("connection_settings")
        self._conn = Elasticsearch(loop=loop, **conn_settings)
    return self._conn
def get_memory_cache():
    """Return the process-wide LRU memory cache, creating it on first use.

    The size defaults to 200MiB when not configured.
    """
    global _lru
    if _lru is None:
        settings = app_settings.get('cache', {})
        # Bug fix: a configured "cache" section that lacks "memory_cache_size"
        # previously raised KeyError; fall back to the default size instead.
        _lru = LRU(settings.get('memory_cache_size', 209715200))
    return _lru
async def install_mappings_on_index(self, index_name):
    """Apply index settings and per-type mappings to *index_name*.

    The index is closed while settings/mappings are replaced, then reopened.
    """
    mappings = get_mappings()
    index_settings = DEFAULT_SETTINGS.copy()
    index_settings.update(app_settings.get('index', {}))
    await self.conn.indices.close(index_name)
    await self.conn.indices.put_settings(index_settings, index_name)
    for doc_type, type_mapping in mappings.items():
        await self.conn.indices.put_mapping(index_name, doc_type, type_mapping)
    await self.conn.indices.open(index_name)
def get_allowed_types(self) -> list:
    """Allowed child types for this context, minus globally disallowed ones.

    Preference order: the context's ``__allowed_types__``, then the parent
    implementation, then every registered factory.
    """
    allowed = getattr(self.context, "__allowed_types__", None)
    if allowed is None:
        allowed = super(CMSCustomAllowedTypes, self).get_allowed_types()
    if allowed is None:
        allowed = FACTORY_CACHE.keys()
    disallowed = app_settings.get("global_disallowed_types", [])
    return [name for name in allowed if name not in disallowed]
async def install_mappings_on_index(self, index_name):
    """Apply index settings and the document mapping to *index_name*.

    Settings and mappings may only be replaced while the index is closed,
    so close it first and reopen once both are installed.
    """
    mappings = get_mappings()
    index_settings = DEFAULT_SETTINGS.copy()
    index_settings.update(app_settings.get('index', {}))
    await self.conn.indices.close(index_name)
    await self.conn.indices.put_settings(body=index_settings, index=index_name)
    await self.conn.indices.put_mapping(doc_type=DOC_TYPE, body=mappings, index=index_name)
    await self.conn.indices.open(index_name)
async def __call__(self):
    """Return registration availability and the configured social providers.

    Validates the recaptcha challenge first; the response lists which of the
    known social auth providers are configured.
    """
    validator = RecaptchaValidator()
    status = await validator.validate()
    if status is False:
        raise HTTPUnauthorized(content={"text": "Invalid validation"})
    auth_providers = app_settings.get("auth_providers", {})
    # Idiom: one ordered pass over the known providers instead of three
    # copy-pasted membership checks (same output order as before).
    providers = [p for p in ("twitter", "facebook", "google") if p in auth_providers]
    return {
        "register": app_settings.get("allow_register", False),
        "social": providers,
        "title": self.context.title,
    }
async def controlpanel(context, request):
    """List the registered control panels with their resolved URLs."""
    url = getMultiAdapter((context, request), IAbsoluteURL)()
    result = []
    # Bug fix: iterating the dict directly yields keys only, so unpacking
    # into (item, value) failed; iterate .items() to get both.
    for item, value in app_settings.get("controlpanels", {}).items():
        result.append({
            "@id": f"{url}/@controlpanels/{item}",
            "group": value["group"],
            "title": value["title"],
        })
    return result
def __init__(self, transaction, loop=None):
    """Set up the per-transaction cache state and redis configuration."""
    super().__init__(transaction)
    self._loop = loop
    # connections are created lazily
    self._conn = None
    self._redis = None
    self._settings = app_settings.get('redis', {})
    self._memory_cache = cache.get_memory_cache()
    # pending pubsub work accumulated during this transaction
    self._keys_to_publish = []
    self._stored_objects = []
async def controlpanel_element(context, request):
    """Write payload values into the registry for the requested control panel.

    Silently returns when the panel or its schema is not configured; only
    keys present in the schema are written.
    """
    payload = await request.json()
    type_id = request.matchdict["type_id"]
    registry = await get_registry()
    controlpanels = app_settings.get("controlpanels", {})
    if type_id not in controlpanels:
        return
    schema = controlpanels[type_id].get("schema", None)
    if schema is None:
        return
    config = registry.for_interface(schema)
    for key, value in payload.items():
        if key in schema:
            # Idiom fix: subscript assignment instead of calling the
            # __setitem__ dunder directly.
            config[key] = value
    return
def get(self, loop=None):
    """Return an Elasticsearch connection for the current container.

    The special "new_container" container gets a dedicated connection built
    from ``new_container_settings`` layered over the base connection
    settings; everything else uses the shared parent connection.
    """
    try:
        container = task_vars.container.get()
    except RequestNotFound:
        return super().get(loop)
    base_settings = app_settings.get("elasticsearch", {}).get("connection_settings")
    # short-circuit order matters: only touch app_settings["elasticsearch"]
    # once we know we have the special container
    use_special = (
        container is not None
        and container.id == "new_container"
        and "new_container_settings" in app_settings["elasticsearch"]
    )
    if not use_special:
        return super().get(loop)
    if self._special_conn is None:
        merged = base_settings.copy()
        merged.update(app_settings["elasticsearch"]["new_container_settings"])
        self._special_conn = Elasticsearch(loop=loop, **merged)
    return self._special_conn
async def _handle(self, request, retries=0):
    """Dispatch *request*, retrying on database conflict errors.

    Retries up to ``conflict_retry_attempts`` (default 3) times by calling
    itself recursively, recording the attempt count on the request; returns
    an HTTP 409 once the attempts are exhausted.
    """
    try:
        return await super()._handle(request)
    except (ConflictError, TIDConflictError) as e:
        if app_settings.get('conflict_retry_attempts', 3) > retries:
            label = 'DB Conflict detected'
            if isinstance(e, TIDConflictError):
                label = 'TID Conflict Error detected'
            # the txn may not have been assigned a tid yet
            tid = getattr(getattr(request, '_txn', None), '_tid', 'not issued')
            logger.warning(
                f'{label}, retrying request, tid: {tid}, retries: {retries + 1})'
            )
            request._retry_attempt = retries + 1
            return await self._handle(request, retries + 1)
        logger.error(
            'Exhausted retry attempts for conflict error on tid: {}'.format(
                getattr(getattr(request, '_txn', None), '_tid', 'not issued')))
        return aiohttp.web_exceptions.HTTPConflict()
async def controlpanel_element(context, request):
    """Serialize a control panel: its JSON schema and current registry data.

    Returns None when the panel has no configured schema; otherwise the
    response carries the serialized schema (properties, fieldsets, required)
    and the current registry values for each field.
    """
    url = getMultiAdapter((context, request), IAbsoluteURL)()
    type_id = request.matchdict["type_id"]
    registry = await get_registry()
    result = {
        "@id": f"{url}/@controlpanels/{type_id}",
        "group": "General",
        "title": "Validations Settings",
        "data": {},
    }
    controlpanels = app_settings.get("controlpanels", {})
    if type_id in controlpanels:
        schema_name = controlpanels[type_id].get("schema", None)
        if schema_name is None:
            return
        schemaObj = resolve_dotted_name(schema_name)
        config = registry.for_interface(schemaObj)
        schema = {"properties": {}, "fieldsets": [], "required": []}
        data = {}
        fields = []
        for name, field in get_fields_in_order(schemaObj):
            if field.required:
                # Bug fix: required field names belong on the serialized
                # schema; ``result`` has no "required" key, so the old
                # ``result["required"].append(name)`` raised KeyError.
                schema["required"].append(name)
            serializer = get_multi_adapter(
                (field, schemaObj, request), ISchemaFieldSerializeToJson)
            schema["properties"][name] = await serializer()
            data[name] = config.__getitem__(name)
            fields.append(name)
        schema["fieldsets"] = [{
            "fields": fields,
            "id": "default",
            "title": "default"
        }]
        result["schema"] = schema
        result["data"] = data
    return result
async def get_index_settings(self):
    """Return the default index settings merged with configured overrides."""
    settings = deepcopy(DEFAULT_SETTINGS)
    settings.update(app_settings.get('index', {}))
    return settings
def get_memory_cache() -> LRU:
    """Return the process-wide LRU memory cache, creating it on first use.

    The size defaults to 200MiB when not configured.
    """
    global _lru
    if _lru is None:
        settings = app_settings.get("cache", {})
        # Bug fix: a configured "cache" section lacking "memory_cache_size"
        # previously raised KeyError; fall back to the default size instead.
        _lru = LRU(settings.get("memory_cache_size", 209715200))
    return _lru
def settings(self):
    """The 'elasticsearch' section of app settings (empty dict when absent)."""
    es_settings = app_settings.get('elasticsearch', {})
    return es_settings
def settings(self):
    """Mailer settings: global app settings overlaid with utility-local ones.

    Bug fix: the previous version called ``update`` directly on the dict
    returned by ``app_settings.get('mailer', {})``, mutating the *global*
    configuration on every access; merge into a fresh dict instead.
    """
    merged = dict(app_settings.get('mailer', {}))
    merged.update(self._settings.get('mailer', {}))
    return merged