def get_mappings():
    """Build the ES mapping for all registered resource types.

    Index definitions come from each type's schema directives; entries in
    the ``elasticsearch.mapping_overrides`` app setting (a ``'*'`` wildcard
    entry plus per-type entries layered on top) replace the computed field
    mapping for matching index names.
    """
    from guillotina import app_settings

    overrides = app_settings.get('elasticsearch', {}).get(
        'mapping_overrides', {})
    wildcard_overrides = overrides.get('*', {})
    # Mapping calculated from schemas
    properties = {}
    for type_name, _ in get_utilities_for(IResourceFactory):
        # overrides for this type: wildcard first, type-specific on top
        type_overrides = {**wildcard_overrides, **overrides.get(type_name, {})}
        for field_name, catalog_info in get_index_fields(type_name).items():
            index_name = catalog_info.get('index_name', field_name)
            field_mapping = catalog_info.get('field_mapping', None)
            if field_mapping is None:
                catalog_type = catalog_info.get('type', 'text')
                field_mapping = CATALOG_TYPES[catalog_type].copy()
            if 'store' in catalog_info:
                field_mapping['store'] = catalog_info['store']
            if index_name in type_overrides:
                field_mapping = type_overrides[index_name]
            properties[index_name] = field_mapping
    return {
        'properties': properties,
        'dynamic': False,
        '_all': {
            'enabled': False
        }
    }
async def default_get(context, request):
    """We show the available schemas.

    Returns a dict with: ``static`` (behaviors configured on the factory),
    ``dynamic`` (behaviors enabled on this object), ``available`` (behaviors
    that could be adapted but are not enabled), plus a serialized schema for
    every applicable behavior keyed by behavior name.
    """
    result = {}
    factory = get_cached_factory(context.type_name)
    result['static'] = []
    for schema in factory.behaviors or ():
        result['static'].append(schema.__identifier__)
    # convert to list, could be frozenset
    result['dynamic'] = [b for b in context.__behaviors__]
    result['available'] = []
    # fix: dropped the redundant second get_cached_factory() call
    for name, utility in get_utilities_for(IBehavior):
        serialize = False
        if name not in result['dynamic'] and name not in result['static']:
            adaptable = query_adapter(
                context, utility.interface, name='', default=None)
            if adaptable:
                result['available'].append(name)
                serialize = True
        else:
            serialize = True
        if serialize:
            # fix: serialize exactly once here; the original also awaited
            # the serializer inside the ``adaptable`` branch, doing the
            # same work twice for available behaviors
            schema_serializer = get_multi_adapter(
                (utility.interface, request), ISchemaSerializeToJson)
            result[name] = await schema_serializer()
    return result
def iter_indexes(
        invalidate=False) -> typing.Iterator[typing.Tuple[str, typing.Dict]]:
    """Yield ``(field_name, catalog_info)`` for every unique index field.

    Results are memoized in ``_cached_indexes``; pass ``invalidate=True``
    to clear the cache and recompute.

    Example document shape built from these fields:
    {
        "access_users": ["root"],
        "uuid":"a037df9fa3624b5fb09dbda1480f8210",
        "contributors":null,
        "created":"2017-03-16T08:46:00.633690-05:00",
        "portal_type":"Folder",
        "title":"Posts",
        "modified":"2017-03-16T08:46:00.633690-05:00",
        "depth":2,
        "subjects":null,
        "path":"/container/posts",
        "creators":null,
        "access_roles":["guillotina.SiteAdmin"],
        "parent_uuid":"8406d8b94d0e47bfa6cb0a82e531216b"
    }
    """
    if invalidate:
        _cached_indexes.clear()
    if len(_cached_indexes) > 0:
        for f, v in _cached_indexes.items():
            yield f, v
        # fix: the original fell through here and re-yielded every field a
        # second time while rebuilding the cache; cached results must
        # short-circuit the recomputation
        return
    found: typing.List[str] = []
    for type_name, schema in get_utilities_for(IResourceFactory):
        for field_name, catalog_info in get_index_fields(type_name).items():
            if field_name in found:
                continue
            yield field_name, catalog_info
            found.append(field_name)
            _cached_indexes[field_name] = catalog_info
async def default_get(context, request):
    """We show the available schemas.

    Returns a dict with: ``static`` (behaviors configured on the factory),
    ``dynamic`` (behaviors enabled on this object), ``available`` (behaviors
    that could be adapted but are not enabled), plus a serialized schema for
    every applicable behavior keyed by behavior name.
    """
    result = {}
    factory = get_cached_factory(context.type_name)
    result["static"] = []
    for schema in factory.behaviors or ():
        result["static"].append(schema.__identifier__)
    # convert to list, could be frozenset
    result["dynamic"] = [b for b in context.__behaviors__]
    result["available"] = []
    # fix: dropped the redundant second get_cached_factory() call
    for name, utility in get_utilities_for(IBehavior):
        serialize = False
        if name not in result["dynamic"] and name not in result["static"]:
            adaptable = query_adapter(context, utility.interface, name="", default=None)
            if adaptable:
                result["available"].append(name)
                serialize = True
        else:
            serialize = True
        if serialize:
            # fix: serialize exactly once here; the original also awaited
            # the serializer inside the ``adaptable`` branch, doing the
            # same work twice for available behaviors
            schema_serializer = get_multi_adapter(
                (utility.interface, request), ISchemaSerializeToJson)
            result[name] = await schema_serializer()
    return result
def includeme(root):
    """Register this add-on's components and mark core guillotina index
    fields as stored in ES."""
    configure.scan("guillotina_elasticsearch.utility")
    configure.scan("guillotina_elasticsearch.manager")
    configure.scan("guillotina_elasticsearch.parser")
    # add store true to guillotina indexes
    stored_fields = {
        "id", "path", "uuid", "type_name", "tid", "creators",
        "contributors", "access_roles", "access_users", "parent_uuid",
        "title", "creation_date", "modification_date", "tags",
    }
    for name, utility in get_utilities_for(IResourceFactory):
        # only touch indexes owned by guillotina core types
        if not get_dotted_name(utility._callable).startswith("guillotina."):
            continue
        for field_name, catalog_info in get_index_fields(name).items():
            if field_name in stored_fields:
                catalog_info["store"] = True
async def get_all_types(context, request):
    """Serialize every registered resource factory to JSON."""
    serialized = []
    for _, factory in get_utilities_for(IResourceFactory):
        serializer = get_multi_adapter(
            (factory, request), IFactorySerializeToJson)
        serialized.append(await serializer())
    return serialized
def get_mappings(schemas=None, schema_info=False):
    """Compute ES field mappings for the given schemas (default: every
    schema of every registered type).

    When two schemas register different mappings for the same index name,
    the schema from the later-loaded addon wins; a conflict between two
    schemas of the *same* addon is unresolvable and raises.
    """
    if schemas is None:
        schemas = []
        for type_name, _ in get_utilities_for(IResourceFactory):
            schemas.extend(get_all_possible_schemas_for_type(type_name))
    mappings = {}
    # index name -> schema that registered the currently-winning mapping
    schema_field_mappings = {}
    for schema in set(schemas):
        index_fields = merged_tagged_value_dict(
            schema, guillotina.directives.index.key)
        for field_name, catalog_info in index_fields.items():
            index_name = catalog_info.get('index_name', field_name)
            field_mapping = catalog_info.get('field_mapping', None)
            if field_mapping is None:
                catalog_type = catalog_info.get('type', 'text')
                field_mapping = CATALOG_TYPES[catalog_type].copy()
            if 'store' in catalog_info:
                field_mapping['store'] = catalog_info['store']
            if schema_info:
                schema_ids = field_mapping.setdefault('_schemas', [])
                if schema.__identifier__ not in schema_ids:
                    schema_ids.append(schema.__identifier__)
            if (index_name in mappings and
                    mappings[index_name] != field_mapping):
                existing_addon_idx = _addon_index(
                    schema_field_mappings[index_name])
                field_addon_idx = _addon_index(catalog_info['__schema__'])
                if existing_addon_idx > field_addon_idx:
                    # we're keeping current value
                    continue
                elif existing_addon_idx == field_addon_idx:
                    # same addon customizing the same field mapping twice
                    # cannot be resolved automatically
                    raise Exception(
                        f'''Unresolvable index mapping conflict: {index_name}
Registered schema: {schema_field_mappings[index_name].__identifier__}
Registered mapping: {mappings[index_name]}
Conflicted schema: {catalog_info['__schema__'].__identifier__}
Registered mapping: {field_mapping}
''')
            schema_field_mappings[index_name] = catalog_info['__schema__']
            mappings[index_name] = field_mapping
    return {
        'properties': mappings,
        'dynamic': False,
        '_all': {
            'enabled': False
        }
    }
def _iter_model_indices():
    """Yield ``(type_name, field_name, info)`` for every declared index
    field; fields with a known GraphQL directive mapping get a
    ``{"gql_type": ...}`` info dict instead of their catalog info."""
    for type_name, _factory in get_utilities_for(IResourceFactory):
        for field_name, catalog_info in get_index_fields(type_name).items():
            if field_name in FIELD_DIRECTIVES_TO_GRAPHQL:
                catalog_info = {
                    "gql_type": FIELD_DIRECTIVES_TO_GRAPHQL[field_name]
                }
            yield type_name, field_name, catalog_info
async def stop_active_consumers(self):
    """Stop every registered active consumer that has a consumer object
    and has not already been stopped; errors are logged, not raised."""
    for name, worker in get_utilities_for(IActiveConsumer):
        if hasattr(worker, '__consumer__') and not getattr(
                worker, '__stopped__', False):
            try:
                # fix: use the module logger instead of the root logger
                # (``logging.warning``) -- the except branch below already
                # used ``logger``, so this was an inconsistent typo
                logger.warning(f'Stopping {name} consumer')
                await worker.__consumer__.stop()
                worker.__stopped__ = True
            except Exception:
                logger.warning(f"Error stopping consumer: {name}",
                               exc_info=True)
def load_cached_schema():
    """Populate FACTORY_CACHE / SCHEMA_CACHE from registered resource
    factories and BEHAVIOR_CACHE from registered behaviors."""
    for _, factory in get_utilities_for(IResourceFactory):
        type_name = factory.type_name
        if type_name in SCHEMA_CACHE:
            continue
        FACTORY_CACHE[type_name] = factory
        behaviors_registrations = []
        for iface in factory.behaviors or ():
            # behaviors may be referenced by interface or by dotted name
            behavior_name = (
                iface.__identifier__ if Interface.providedBy(iface) else iface)
            behaviors_registrations.append(
                get_utility(IBehavior, name=behavior_name))
        SCHEMA_CACHE[type_name] = {
            "behaviors": behaviors_registrations,
            "schema": factory.schema,
        }
    for iface, utility in get_utilities_for(IBehavior):
        if isinstance(iface, str):
            name = iface
        elif Interface.providedBy(iface):
            name = iface.__identifier__
        # NOTE(review): if ``iface`` is neither a str nor an Interface,
        # ``name`` is stale/unbound here -- presumably that never happens,
        # but worth confirming
        if name not in BEHAVIOR_CACHE:
            BEHAVIOR_CACHE[name] = utility.interface
def get_all_possible_schemas_for_type(type_name):
    """Return every schema that can apply to ``type_name``: its main
    schema, its static behaviors, and any registered behavior whose
    ``for_`` matches the factory schema. Order is unspecified (set-backed)."""
    factory = get_cached_factory(type_name)
    schemas = set()
    if factory.schema is not None:
        schemas.add(factory.schema)
    schemas.update(factory.behaviors or ())
    for _, utility in get_utilities_for(IBehavior):
        if utility.for_.isEqualOrExtendedBy(factory.schema):
            schemas.add(utility.interface)
    return list(schemas)
def __init__(self):
    """Precompute, per type name and index name, which schema contributes
    each index field and its directive properties."""
    self.data_adapter = DefaultCatalogDataAdapter(None)
    self.mappings = {}
    for type_name, _factory in get_utilities_for(IResourceFactory):
        type_mappings = self.mappings.setdefault(type_name, {})
        for schema in iter_schemata_for_type(type_name):
            tagged = merged_tagged_value_dict(schema, directives.index.key)
            for field_name, index_data in tagged.items():
                index_name = index_data.get('index_name', field_name)
                type_mappings[index_name] = {
                    'schema': schema,
                    'properties': index_data,
                }
def includeme(root):
    """Register this add-on's components and mark core guillotina index
    fields as stored in ES."""
    configure.scan('guillotina_elasticsearch.utility')
    configure.scan('guillotina_elasticsearch.manager')
    # add store true to guillotina indexes
    stored_fields = {
        'id', 'path', 'uuid', 'type_name', 'tid', 'creators',
        'contributors', 'access_roles', 'access_users', 'parent_uuid',
        'title',
    }
    for name, utility in get_utilities_for(IResourceFactory):
        # only touch indexes owned by guillotina core types
        if not get_dotted_name(utility._callable).startswith('guillotina.'):
            continue
        for field_name, catalog_info in get_index_fields(name).items():
            if field_name in stored_fields:
                catalog_info['store'] = True
def get_mappings(schemas=None, schema_info=False):
    """Compute ES field mappings for the given schemas (default: every
    schema of every registered type).

    Cross-addon conflicts for the same index name are resolved in favor of
    the later-loaded addon; a conflict within a single addon raises.
    """
    if schemas is None:
        schemas = []
        for type_name, _ in get_utilities_for(IResourceFactory):
            schemas.extend(get_all_possible_schemas_for_type(type_name))
    mappings = {}
    # index name -> schema that registered the currently-winning mapping
    schema_field_mappings = {}
    for schema in set(schemas):
        index_fields = merged_tagged_value_dict(
            schema, guillotina.directives.index.key)
        for field_name, catalog_info in index_fields.items():
            index_name = catalog_info.get("index_name", field_name)
            field_mapping = catalog_info.get("field_mapping", None)
            if field_mapping is None:
                catalog_type = catalog_info.get("type", "text")
                field_mapping = CATALOG_TYPES[catalog_type].copy()
            if "store" in catalog_info:
                field_mapping["store"] = catalog_info["store"]
            if schema_info:
                schema_ids = field_mapping.setdefault("_schemas", [])
                if schema.__identifier__ not in schema_ids:
                    schema_ids.append(schema.__identifier__)
            if index_name in mappings and mappings[index_name] != field_mapping:
                existing_addon_idx = _addon_index(
                    schema_field_mappings[index_name])
                field_addon_idx = _addon_index(catalog_info["__schema__"])
                if existing_addon_idx > field_addon_idx:
                    # we're keeping current value
                    continue
                elif existing_addon_idx == field_addon_idx:
                    # same addon customizing the same field mapping twice
                    # cannot be resolved automatically
                    raise Exception(
                        f"""Unresolvable index mapping conflict: {index_name}
Registered schema: {schema_field_mappings[index_name].__identifier__}
Registered mapping: {mappings[index_name]}
Conflicted schema: {catalog_info['__schema__'].__identifier__}
Registered mapping: {field_mapping}
""")
            schema_field_mappings[index_name] = catalog_info["__schema__"]
            mappings[index_name] = field_mapping
    return {"properties": mappings}
async def migrate(self, db):
    """Run, in version order, every registered migration newer than the
    db root's recorded migration version, persisting the version after
    each successful step."""
    migrations = sorted(
        get_utilities_for(IMigration),
        key=lambda item: StrictVersion(item[0]))
    async with transaction(db=db) as txn:
        # make sure to get fresh copy
        txn._manager._hard_cache.clear()
        root = await db.get_root()
        current_version = StrictVersion(root.migration_version)
        for version, migration in migrations:
            if StrictVersion(version) <= current_version:
                continue
            logger.warning(f"Starting migration on db {version}: {db.id}")
            await migration(db)
            logger.warning(f"Finished migration on db {version}: {db.id}")
            root.migration_version = version
            txn.register(root)
def apply_concrete_behaviors():
    """Configured behaviors for an object should always be applied and
    can't be removed. Should be called once at startup instead of doing
    alsoProvides every time an object is created."""
    for _type_name, factory in get_utilities_for(IResourceFactory):
        for behavior in factory.behaviors:
            registration = get_utility(
                IBehavior, name=behavior.__identifier__)
            marker = registration.marker
            if marker is not None:
                classImplements(factory._callable, marker)
def get_metadata():
    """Return (and lazily build) the merged list of metadata fields across
    all cached factory schemas and all registered behavior interfaces."""
    global METADATA_CACHE
    if METADATA_CACHE is None:
        mapping = []
        for type_name, type_schema in FACTORY_CACHE.items():
            mapping.extend(
                merged_tagged_value_list(type_schema.schema, metadata.key))
        for _, utility in get_utilities_for(IBehavior):
            mapping.extend(
                merged_tagged_value_list(utility.interface, metadata.key))
        METADATA_CACHE = mapping
    # fix: removed the no-op ``else: METADATA_CACHE`` expression statement
    return METADATA_CACHE
async def migrate(self, db):
    """Run pending migrations (those newer than the db root's recorded
    version) in version order, persisting the version after each step."""
    # fix: sort by parsed version, not lexicographically -- a plain
    # ``sorted()`` on (version, utility) tuples orders '1.10' before '1.2'
    migrations = sorted(
        get_utilities_for(IMigration), key=lambda v: StrictVersion(v[0]))
    self.request._tm = db.get_transaction_manager()
    async with managed_transaction(self.request, write=True) as txn:
        # make sure to get fresh copy
        txn._manager._hard_cache.clear()
        root = await db.get_root()
        current_version = StrictVersion(root.migration_version)
        for version, migration in migrations:
            if StrictVersion(version) > current_version:
                logger.warning(f'Starting migration on db {version}: {db.id}')
                await migration(db)
                logger.warning(f'Finished migration on db {version}: {db.id}')
                root.migration_version = version
                txn.register(root)
async def __call__(self):
    """Serialize ``self.value`` (defaulting to every registered resource
    factory): a list serializes element-wise, anything else directly."""
    if not hasattr(self, 'value'):
        self.value = [x[1] for x in get_utilities_for(IResourceFactory)]
    if not isinstance(self.value, list):
        serializer = get_multi_adapter((self.value, self.request),
                                       IFactorySerializeToJson)
        return await serializer()
    result = []
    for factory in self.value:
        serializer = get_multi_adapter((factory, self.request),
                                       IFactorySerializeToJson)
        result.append(await serializer())
    return result
def fhir_field_from_resource_type(
    resource_type: str, cache: bool = True
) -> Union[dict, None]:
    """Map a FHIR resource type to the fields (and owning content types)
    that can hold it, searching factory schemas first, then behaviors.

    Returns ``{field_name: {"field": field, "types": [type_name, ...]}}``
    or ``None`` when nothing matches. Results are memoized unless
    ``cache`` is False. Raises ``Invalid`` for unknown resource types.
    """
    global _RESOURCE_TYPE_TO_FHIR_FIELD_CACHE
    if cache and resource_type in _RESOURCE_TYPE_TO_FHIR_FIELD_CACHE:
        return _RESOURCE_TYPE_TO_FHIR_FIELD_CACHE[resource_type]
    # validate_resource_type(resource_type)
    klass_path = lookup_fhir_class_path(resource_type)
    if klass_path is None:
        raise Invalid(f"{resource_type} is not valid FHIR Resource")
    factories = [x[1] for x in get_utilities_for(IResourceFactory)]
    fields: dict = {}
    for factory in factories:
        field = fhir_field_from_schema(factory.schema, resource_type)
        if field is not None:
            if field.getName() not in fields:
                fields[field.getName()] = {"field": field, "types": list()}
            if factory.type_name not in fields[field.getName()]["types"]:
                fields[field.getName()]["types"].append(factory.type_name)
            break
        # Try find from behavior
        for schema in factory.behaviors or ():
            # fix: forward ``resource_type`` so behavior fields are
            # filtered by resource type just like schema fields are --
            # the original omitted it only in this branch
            field = fhir_field_from_schema(schema, resource_type)
            if field is not None:
                if field.__name__ not in fields:
                    fields[field.__name__] = {"field": field, "types": list()}
                if factory.type_name not in fields[field.__name__]["types"]:
                    fields[field.__name__]["types"].append(factory.type_name)
    if fields:
        # xxx: do validation over multiple fields or other stuff?
        _RESOURCE_TYPE_TO_FHIR_FIELD_CACHE[resource_type] = fields
        return _RESOURCE_TYPE_TO_FHIR_FIELD_CACHE[resource_type]
    return None
async def get_all_indices(context, request):
    """Report, per content type and per behavior, every declared index
    field name and its index type."""
    base_url = IAbsoluteURL(context, request)()
    result = {"@id": base_url, "types": {}, "behaviors": {}}
    for type_name, type_schema in FACTORY_CACHE.items():
        indices = merged_tagged_value_dict(type_schema.schema, index.key)
        result["types"][type_name] = {
            key: value["type"] for key, value in indices.items()}
    for behavior, utility in get_utilities_for(IBehavior):
        indices = merged_tagged_value_dict(utility.interface, index.key)
        result["behaviors"][behavior] = {
            key: value["type"] for key, value in indices.items()}
    return result
async def migrate(self, db):
    """Run pending migrations (those newer than the db root's recorded
    version) in version order, persisting the version after each step."""
    # fix: sort by parsed version, not lexicographically -- a plain
    # ``sorted()`` on (version, utility) tuples orders '1.10' before '1.2'
    migrations = sorted(
        get_utilities_for(IMigration), key=lambda v: StrictVersion(v[0]))
    self.request._tm = db.get_transaction_manager()
    async with managed_transaction(self.request, write=True) as txn:
        # make sure to get fresh copy
        txn._manager._hard_cache.clear()
        root = await db.get_root()
        current_version = StrictVersion(root.migration_version)
        for version, migration in migrations:
            if StrictVersion(version) > current_version:
                logger.warning(
                    f'Starting migration on db {version}: {db.id}')
                await migration(db)
                logger.warning(
                    f'Finished migration on db {version}: {db.id}')
                root.migration_version = version
                txn.register(root)
def get_indexes():
    """ Get all the indexes

    Lazily builds and caches the merged index-directive dict across all
    cached factory schemas and all registered behavior interfaces.
    """
    global INDEXES_CACHE
    if INDEXES_CACHE is None:
        mapping = {}
        for type_name, type_schema in FACTORY_CACHE.items():
            mapping.update(
                merged_tagged_value_dict(type_schema.schema, index.key))
        for _, utility in get_utilities_for(IBehavior):
            mapping.update(
                merged_tagged_value_dict(utility.interface, index.key))
        INDEXES_CACHE = mapping
    # fix: removed the no-op ``else: INDEXES_CACHE`` expression statement
    return INDEXES_CACHE
def get_index_fields():
    """Return the unique index names declared across every schema of every
    registered type, in first-seen order."""
    schemas = []
    for type_name, _ in get_utilities_for(IResourceFactory):
        schemas.extend(get_all_possible_schemas_for_type(type_name))
    index_names = []
    for schema in set(schemas):
        tagged = guillotina.directives.merged_tagged_value_dict(
            schema, guillotina.directives.index.key)
        for field_name, catalog_info in tagged.items():
            index_name = catalog_info.get('index_name', field_name)
            if index_name not in index_names:
                index_names.append(index_name)
    return index_names
def _is_multi_valued(check_field_name):
    """Return True when the given index name is backed by a collection
    field on some schema (i.e. it stores multiple values)."""
    if not _stored_multi_valued:
        # load types and cache, once
        for type_name, _ in get_utilities_for(IResourceFactory):
            for schema in get_all_possible_schemas_for_type(type_name):
                tagged = guillotina.directives.merged_tagged_value_dict(
                    schema, guillotina.directives.index.key)
                for field_name, catalog_info in tagged.items():
                    index_name = catalog_info.get('index_name', field_name)
                    try:
                        field = schema[field_name]
                        _stored_multi_valued[index_name] = \
                            ICollection.providedBy(field)  # noqa
                    except KeyError:
                        # index declared without a matching schema field
                        _stored_multi_valued[index_name] = False
    return _stored_multi_valued.get(check_field_name, False)
def get_mappings(schemas=None, schema_info=False):
    """Compute the ES field mapping for the given schemas (default: every
    schema of every registered type); with ``schema_info`` each field
    mapping also lists the schema identifiers that declared it."""
    if schemas is None:
        schemas = []
        for type_name, _ in get_utilities_for(IResourceFactory):
            schemas.extend(get_all_possible_schemas_for_type(type_name))
    mappings = {}
    for schema in set(schemas):
        index_fields = guillotina.directives.merged_tagged_value_dict(
            schema, guillotina.directives.index.key)
        for field_name, catalog_info in index_fields.items():
            index_name = catalog_info.get('index_name', field_name)
            field_mapping = catalog_info.get('field_mapping', None)
            if field_mapping is None:
                catalog_type = catalog_info.get('type', 'text')
                field_mapping = CATALOG_TYPES[catalog_type].copy()
            if 'store' in catalog_info:
                field_mapping['store'] = catalog_info['store']
            if schema_info:
                schema_ids = field_mapping.setdefault('_schemas', [])
                if schema.__identifier__ not in schema_ids:
                    schema_ids.append(schema.__identifier__)
            mappings[index_name] = field_mapping
    return {
        'properties': mappings,
        'dynamic': False,
        '_all': {
            'enabled': False
        }
    }
def check_role(context, role_id):
    """Raise ValueError if ``role_id`` is not a registered role."""
    registered = [name for name, _ in get_utilities_for(IRole, context)]
    if role_id not in registered:
        raise ValueError(f'Undefined role id "{role_id}"')
async def calculate_mapping_diff(self):
    '''
    all we care about is new fields...
    Missing ones are ignored and we don't care about it.
    '''
    diffs = {}
    existing_index_name = await self.utility.get_real_index_name(
        self.container, self.request)
    existing_mappings = await self.conn.indices.get_mapping(
        existing_index_name)
    existing_mappings = existing_mappings[existing_index_name]['mappings']
    next_mappings = await self.conn.indices.get_mapping(
        self.work_index_name)
    next_mappings = next_mappings[self.work_index_name]['mappings']

    changes = False
    for type_name in existing_mappings.keys():
        if type_name in next_mappings:
            continue
        # copy over orphaned type otherwise move will potentially not work
        # any orphaned doc types will need to be manually deleted for now...
        mapping = existing_mappings[type_name]
        properties = mapping['properties']
        # need to make sure to normalize field definitions so they are inline
        # with new mappings otherwise you could get conflicting definitions
        for field_name in properties.keys():
            for check_type_name in next_mappings.keys():
                check_props = next_mappings[check_type_name]['properties']
                if field_name in check_props:
                    properties[field_name] = check_props[field_name]
                    break
        # and install new mapping
        await self.utility.conn.indices.put_mapping(
            self.work_index_name, type_name, mapping)
        changes = True

    if changes:
        # we add to the mappings so we need to update...
        next_mappings = await self.conn.indices.get_mapping(
            self.work_index_name)
        next_mappings = next_mappings[self.work_index_name]['mappings']

    for type_name, schema in get_utilities_for(IResourceFactory):
        if type_name not in existing_mappings:
            # brand new type: the whole mapping is the diff
            diffs[type_name] = next_mappings[type_name]['properties']
            continue
        new_definitions = {}
        existing_mapping = existing_mappings[type_name]['properties']
        next_mapping = next_mappings[type_name]['properties']
        for field_name, definition in next_mapping.items():
            definition = _clean_mapping(definition)
            if (field_name not in existing_mapping or
                    definition != _clean_mapping(
                        existing_mapping[field_name])):
                new_definitions[field_name] = definition
        if len(new_definitions) > 0:
            diffs[type_name] = new_definitions

    for type_name, mapping in existing_mappings.items():
        if type_name not in next_mappings:
            # special case here... we need to import this mapping still
            # in order for the index copy to work correctly if docs ref it
            self.response.write(
                f'Backporting mapping of {type_name} to new '
                f'even though it is not defined anymore')
            await self.conn.indices.put_mapping(
                self.work_index_name, type_name, mapping)

    return diffs
def global_roles():
    """Return (and cache in app_settings) the names of all non-local roles."""
    if 'global_roles' not in app_settings:
        app_settings['global_roles'] = [
            name for name, util in get_utilities_for(IRole)
            if not util.local
        ]
    return app_settings['global_roles']
def check_role(context, role_id):
    """Raise ValueError if ``role_id`` is not a registered role."""
    registered = {name for name, _ in get_utilities_for(IRole, context)}
    if role_id not in registered:
        raise ValueError("Undefined role id", role_id)
def local_roles():
    """Return (and cache in app_settings) the names of all local roles."""
    if "local_roles" not in app_settings:
        app_settings["local_roles"] = [
            name for name, util in get_utilities_for(IRole)
            if util.local
        ]
    return app_settings["local_roles"]
def get_all_permissions(context=None):
    """Get the ids of all defined permissions
    (the public pseudo-permission ``zope.Public`` is excluded).
    """
    # renamed loop variable: the original shadowed the ``id`` builtin
    for permission_id, _permission in get_utilities_for(IPermission, context):
        if permission_id != 'zope.Public':
            yield permission_id