def _get(discriminator):
    """Get the TypeInfo instance for discriminator, that may be any of:
    type_key: str (the lowercase underscore-separated of domain cls name)
    workflow: an instance of Workflow, provides IWorkflow
    interface: provides IInterface
    domain model: provides IBungeniContent
    domain model instance: type provides IBungeniContent
    descriptor: provides IModelDescriptor
    Raise KeyError if no entry matched.
    Usage: capi.get_type_info(discriminator)
    """
    # a None discriminator is a programming error, not a lookup miss
    if discriminator is None:
        m = "type_info._get discriminator is None"
        log.error(m)
        raise ValueError(m)
    discri = removeSecurityProxy(discriminator)
    getter = None
    # !+IALCHEMISTCONTENT normalize trickier discriminator cases to type_key
    # NOTE: branch order matters -- IIModelInterface must be tested before the
    # more general IInterface, and class-check before instance-check.
    if IIModelInterface.providedBy(discri):
        discri = naming.type_key("table_schema_interface_name", discri.__name__)
    elif IInterface.providedBy(discri):
        discri = naming.type_key("model_interface_name", discri.__name__)
    elif type(discri) is type and issubclass(discri, domain.Entity):
        # a domain model class
        discri = naming.polymorphic_identity(discri)
    elif isinstance(discri, domain.Entity):
        # a domain model instance
        discri = naming.polymorphic_identity(type(discri))
    # dispatch on what the (possibly normalized) discriminator now is
    if isinstance(discri, basestring):
        getter = _get_by_type_key
    #elif IInterface.providedBy(discri):
    #    getter = _get_by_interface
    #!+elif interfaces.IBungeniContent.implementedBy(discri):
    #elif issubclass(discri, domain.Entity):
    #    getter = _get_by_model
    #!+elif interfaces.IBungeniContent.providedBy(discri):
    #elif isinstance(discri, domain.Entity):
    #    getter = _get_by_instance
    elif IWorkflow.providedBy(discri):
        getter = _get_by_workflow
    elif IModelDescriptor.implementedBy(discri):
        getter = _get_by_descriptor_model
    if getter is not None:
        ti = getter(discri)
        if ti is not None:
            return ti
        else:
            # recognized discriminator kind, but no registered type matched
            m = "No type registered for discriminator: %r" % (discriminator)
    else:
        # discriminator kind itself was not recognized
        m = "Invalid type info lookup discriminator: %r" % (discriminator)
    # local import to avoid a module-level dependency for the error path only
    from bungeni.utils import probing
    log.debug(probing.interfaces(discriminator))
    log.debug(m)
    raise KeyError(m)
def _get(discriminator):
    """Get the TypeInfo instance for discriminator, that may be any of:
    type_key: str (the lowercase underscore-separated of domain cls name)
    workflow: an instance of Workflow, provides IWorkflow
    interface: provides IInterface
    domain model: provides IBungeniContent
    domain model instance: type provides IBungeniContent
    descriptor: provides IModelDescriptor
    Raise KeyError if no entry matched.
    Usage: capi.get_type_info(discriminator)
    """
    # a None discriminator is a programming error, not a lookup miss
    if discriminator is None:
        m = "type_info._get discriminator is None"
        log.error(m)
        raise ValueError(m)
    discri = removeSecurityProxy(discriminator)
    getter = None
    # !+IALCHEMISTCONTENT normalize trickier discriminator cases to type_key
    # NOTE: branch order matters -- IIModelInterface must be tested before the
    # more general IInterface, and class-check before instance-check.
    if IIModelInterface.providedBy(discri):
        discri = naming.type_key("table_schema_interface_name", discri.__name__)
    elif IInterface.providedBy(discri):
        discri = naming.type_key("model_interface_name", discri.__name__)
    elif type(discri) is type and issubclass(discri, domain.Entity):
        # a domain model class
        discri = naming.polymorphic_identity(discri)
    elif isinstance(discri, domain.Entity):
        # a domain model instance
        discri = naming.polymorphic_identity(type(discri))
    # dispatch on what the (possibly normalized) discriminator now is
    if isinstance(discri, basestring):
        getter = _get_by_type_key
    #elif IInterface.providedBy(discri):
    #    getter = _get_by_interface
    #!+elif interfaces.IBungeniContent.implementedBy(discri):
    #elif issubclass(discri, domain.Entity):
    #    getter = _get_by_model
    #!+elif interfaces.IBungeniContent.providedBy(discri):
    #elif isinstance(discri, domain.Entity):
    #    getter = _get_by_instance
    elif IWorkflow.providedBy(discri):
        getter = _get_by_workflow
    elif IModelDescriptor.implementedBy(discri):
        getter = _get_by_descriptor_model
    if getter is not None:
        ti = getter(discri)
        if ti is not None:
            return ti
        else:
            # recognized discriminator kind, but no registered type matched
            m = "No type registered for discriminator: %r" % (discriminator)
    else:
        # discriminator kind itself was not recognized
        m = "Invalid type info lookup discriminator: %r" % (discriminator)
    # local import to avoid a module-level dependency for the error path only
    from bungeni.ui.utils import debug
    log.debug(debug.interfaces(discriminator))
    log.debug(m)
    raise KeyError(m)
def __str__(self):
    """Absolute admin URL for a doc, addressed via its containing chamber."""
    doc = removeSecurityProxy(self.context)
    site_url = ui_utils.url.absoluteURL(getSite(), self.request)
    # plural type key gives the container segment, e.g. "questions"
    container_segment = naming.plural(naming.polymorphic_identity(type(doc)))
    return "%s/admin/content/chambers/obj-%s/%s/%s" % (
        site_url, doc.chamber_id, container_segment, stringKey(doc))
def __init__(self, context):
    """Record the (pk, type key) identity of a domain object."""
    #self.context = context
    unwrapped = zope.security.proxy.removeSecurityProxy(context)
    # !+ASSUMPTION_SINGLE_COLUMN_PK(mr, may-2012)
    pk_values = rdb.orm.object_mapper(unwrapped).primary_key_from_instance(unwrapped)
    self.oid = pk_values[0]
    self.object_type = naming.polymorphic_identity(unwrapped.__class__)
def get_param(self, name, context=None):
    """Resolve parameter `name` against the configured "when" clauses.

    self.whens maps (subtype, condition_name) pairs to When objects;
    the (None, None) entry is the unconditional default. Resolution order:
    1. whens keyed on the context's own subtype (condition, if any, must hold);
    2. otherwise whens keyed on no subtype but with a condition that holds;
    3. otherwise the default when's params.
    """
    default_when = self.whens[(None, None)]
    if context is not None:
        when = None
        context_subtype = naming.polymorphic_identity(type(context))
        for subtype, condition_name in self.whens:
            # !+ what about subs of subtype?
            if context_subtype == subtype:
                # !+ what if multiple conditions for same suntype?
                when = self.whens[(subtype, condition_name)]
                if condition_name is not None:
                    if when.condition(context): # raises BungeniCustomError
                        break
                else:
                    # subtype match with no condition -- take it
                    break
        else:
            # no subtype-specific match broke out above: fall back to
            # subtype-less whens that carry a condition
            for subtype, condition_name in self.whens:
                if subtype is None:
                    if condition_name is not None:
                        when = self.whens[(None, condition_name)]
                        if when.condition(context): # raises BungeniCustomError
                            break
                        else:
                            # condition failed -- reset, keep scanning
                            when = None
        if when:
            # missing params fall through to the default when's value
            return when.params.get(name, default_when.params[name])
    return default_when.params[name]
def localize_domain_model_from_descriptor_class(domain_model, descriptor_cls):
    """Localize the domain model for configuration information in the
    descriptor i.e. any extended/derived attributes.
    For any model/descriptor this should be called only once!

    Mutates domain_model in place (adds properties, instruments mappers);
    the DONE registry guards against double application.
    """
    type_key = naming.polymorphic_identity(domain_model)
    # localize models from descriptors only once!
    assert type_key not in localize_domain_model_from_descriptor_class.DONE, \
        "May not re-localize [%s] domain model from descriptor" % (type_key)
    localize_domain_model_from_descriptor_class.DONE.append(type_key)
    #!+GET_ARCHETYPE
    #!+archetype_key = naming.polymorphic_identity(domain_model.__bases__[0]) multiple inheritance...
    # archetype key is derived from the descriptor's immediate base class name
    archetype_key = naming._type_key_from_descriptor_class_name(
        descriptor_cls.__bases__[0].__name__)
    for field in descriptor_cls.fields:
        # extended
        if field.extended is not None:
            add_extended_property_to_model(domain_model, field.name,
                field.extended, archetype_key)
        # derived
        if field.derived is not None:
            add_derived_property_to_model(domain_model, field.name, field.derived)
    # !+if domain_model.extended_properties: ?
    # !+instrument_extended_properties, archetype_key => table...
    instrument_extended_properties(domain_model, archetype_key)
    mapper_add_relation_vertical_properties(domain_model)
    # !+AUDIT_EXTENDED_ATTRIBUTES as audit class was created prior to
    # extended attributes being updated on domain type, need to push onto
    # it any extended attrs that were read from model's descriptor
    if interfaces.IFeatureAudit.implementedBy(domain_model):
        # either defined manually or created dynamically in feature_audit()
        audit_kls = getattr(MODEL_MODULE, "%sAudit" % (domain_model.__name__))
        # propagate any extended attributes on head kls also to its audit_kls
        import bungeni.models.domain
        audit_table_name = bungeni.models.domain.get_audit_table_name(domain_model)
        instrument_extended_properties(
            audit_kls, audit_table_name, from_class=domain_model)
    # containers
    from bungeni.capi import capi
    for name, target_type_key, rel_attr in descriptor_cls.info_containers:
        try:
            # NOTE: tti is only used as an existence probe for the target type
            tti = capi.get_type_info(target_type_key)
        except KeyError:
            # target type not enabled -- skip this container property
            log.warn("Ignoring %r container property %r to disabled type: %s.%s",
                type_key, name, target_type_key, rel_attr)
            continue
        container_qualname = "bungeni.models.domain.%s" % (
            naming.container_class_name(target_type_key))
        add_container_property_to_model(domain_model, name,
            container_qualname, rel_attr)
def generate_table_schema_interface(ti): '''!+DO_NOT_REORDER_USER_APPLIED_INTERFACES def get_domain_interfaces(domain_model): """Return the domain bases for an interface as well as a filtered implements only list (base interfaces removed). Note that for 2nd level (mapped) domain classes i.e. those that inherit from another domain class e.g. Event(Doc), Office(Group), OfficeMember(GroupMembership), an IIModelInterface-providing I*TableSchema interface had already been created (for base class) and assigned to the super class--and that interface will match as one of the domain_base interfaces here. """ domain_bases = [] domain_implements = [] for iface in interface.implementedBy(domain_model): if IIModelInterface.providedBy(iface): domain_bases.append(iface) else: domain_implements.append(iface) domain_bases = tuple(domain_bases) or (IAlchemistContent,) return domain_bases, domain_implements bases, implements = get_domain_interfaces(ti.domain_model) ''' # derived_table_schema: # - ALWAYS dynamically generated # - directlyProvides IIModelInterface (by virtue of IAlchemistContent) type_key = naming.polymorphic_identity(ti.domain_model) # use the class's mapper select table as input for the transformation table_schema_interface_name = naming.table_schema_interface_name(type_key) domain_table = utils.get_local_table(ti.domain_model) derived_table_schema = transmute( domain_table, annotation=ti.descriptor_model, interface_name=table_schema_interface_name, __module__=INTERFACE_MODULE.__name__, #_generated_by="bungeni.alchemist.catalyst.generate_table_schema_interface" #bases=bases) bases=(IAlchemistContent, )) # apply, register on type_info, set on module interface.classImplements(ti.domain_model, derived_table_schema) utils.inisetattr(ti, "derived_table_schema", derived_table_schema) setattr(INTERFACE_MODULE, table_schema_interface_name, derived_table_schema) log.info("generate_table_schema_interface: %s", derived_table_schema) # defensive sanity check - that 
derived_table_schema is precisely the FIRST # resolving IIModelInterface-providing interface implemented by domain_model # !+ this failing does not necessarily mean an incorrectness for iface in interface.implementedBy(ti.domain_model): if IIModelInterface.providedBy(iface): assert iface is derived_table_schema, (ti.domain_model, iface, id(iface), derived_table_schema, id(derived_table_schema)) break '''!+DO_NOT_REORDER_USER_APPLIED_INTERFACES
def generate_table_schema_interface(ti): '''!+DO_NOT_REORDER_USER_APPLIED_INTERFACES def get_domain_interfaces(domain_model): """Return the domain bases for an interface as well as a filtered implements only list (base interfaces removed). Note that for 2nd level (mapped) domain classes i.e. those that inherit from another domain class e.g. Event(Doc), Office(Group), OfficeMember(GroupMembership), an IIModelInterface-providing I*TableSchema interface had already been created (for base class) and assigned to the super class--and that interface will match as one of the domain_base interfaces here. """ domain_bases = [] domain_implements = [] for iface in interface.implementedBy(domain_model): if IIModelInterface.providedBy(iface): domain_bases.append(iface) else: domain_implements.append(iface) domain_bases = tuple(domain_bases) or (IAlchemistContent,) return domain_bases, domain_implements bases, implements = get_domain_interfaces(ti.domain_model) ''' # derived_table_schema: # - ALWAYS dynamically generated # - directlyProvides IIModelInterface (by virtue of IAlchemistContent) type_key = naming.polymorphic_identity(ti.domain_model) # use the class's mapper select table as input for the transformation table_schema_interface_name = naming.table_schema_interface_name(type_key) domain_table = utils.get_local_table(ti.domain_model) derived_table_schema = transmute( domain_table, annotation=ti.descriptor_model, interface_name=table_schema_interface_name, __module__=INTERFACE_MODULE.__name__, #_generated_by="bungeni.alchemist.catalyst.generate_table_schema_interface" #bases=bases) bases=(IAlchemistContent,)) # apply, register on type_info, set on module interface.classImplements(ti.domain_model, derived_table_schema) utils.inisetattr(ti, "derived_table_schema", derived_table_schema) setattr(INTERFACE_MODULE, table_schema_interface_name, derived_table_schema) log.info("generate_table_schema_interface: %s", derived_table_schema) # defensive sanity check - that 
derived_table_schema is precisely the FIRST # resolving IIModelInterface-providing interface implemented by domain_model # !+ this failing does not necessarily mean an incorrectness for iface in interface.implementedBy(ti.domain_model): if IIModelInterface.providedBy(iface): assert iface is derived_table_schema, (ti.domain_model, iface, id(iface), derived_table_schema, id(derived_table_schema)) break '''!+DO_NOT_REORDER_USER_APPLIED_INTERFACES
def user_may_edit_context_parent(context):
    """Does user have edit permission on the context's parent?
    For a context that is a workflowed sub-object, such as an Attachment or
    an Event.
    """
    parent = context.head
    parent_type_key = naming.polymorphic_identity(type(parent))
    edit_permission = "bungeni.%s.Edit" % (parent_type_key)
    return checkPermission(edit_permission, parent)
def __str__(self):
    """Absolute admin URL for a group member, rooted at the member's group."""
    member = removeSecurityProxy(self.context)
    site_url = ui_utils.url.absoluteURL(getSite(), self.request)
    group_path = super(
        GroupMemberAdminAbsoluteURLView, self)._group_url_path(member.group)
    member_container = naming.plural(naming.polymorphic_identity(type(member)))
    return "%s/%s/%s/%s" % (
        site_url, group_path, member_container, stringKey(member))
def __str__(self):
    """Absolute admin URL for a group member, rooted at the member's group."""
    member = removeSecurityProxy(self.context)
    parts = (
        ui_utils.url.absoluteURL(getSite(), self.request),
        super(GroupMemberAdminAbsoluteURLView, self)._group_url_path(
            member.group),
        naming.plural(naming.polymorphic_identity(type(member))),
        stringKey(member),
    )
    return "%s/%s/%s/%s" % parts
def __str__(self):
    """Absolute admin URL for a doc, addressed via its containing chamber."""
    doc = removeSecurityProxy(self.context)
    parts = (
        ui_utils.url.absoluteURL(getSite(), self.request),
        doc.chamber_id,
        # plural type key gives the container segment, e.g. "questions"
        naming.plural(naming.polymorphic_identity(type(doc))),
        stringKey(doc),
    )
    return "%s/admin/content/chambers/obj-%s/%s/%s" % parts
def generate_container_class(ti):
    """Generate a zope3 container class for a domain model.

    Idempotent per container name: if the class (or interface) is already set
    on the target module it is reused and registered on ti instead of being
    re-created. Side effects: sets attributes on CONTAINER_MODULE and
    INTERFACE_MODULE, and sets ti.container_class / ti.container_interface.
    """
    type_key = naming.polymorphic_identity(ti.domain_model)
    container_name = naming.container_class_name(type_key)
    container_iname = naming.container_interface_name(type_key)
    base_interfaces = (IAlchemistContainer, ) # !+achetype.container_interface?
    # logging variables
    msg = (ti.domain_model.__name__, CONTAINER_MODULE.__name__, container_name)
    # container class - if we already have one, exit
    if getattr(CONTAINER_MODULE, container_name, None):
        log.info(
            "generate_container_class [model=%s] found container %s.%s, skipping",
            *msg)
        ti.container_class = getattr(CONTAINER_MODULE, container_name)
        return
    container_class = type(
        container_name,
        (AlchemistContainer, ),
        dict(_class=ti.domain_model, __module__=CONTAINER_MODULE.__name__))
    # set on CONTAINER_MODULE, register on type_info
    setattr(CONTAINER_MODULE, container_name, container_class)
    ti.container_class = container_class
    log.info("generate_container_class [model=%s] generated container %s.%s",
        *msg)
    # container interface - if we already have one, skip creation
    # !+ should always be newly created?
    container_iface = getattr(INTERFACE_MODULE, container_iname, None)
    msg = (ti.domain_model.__name__, CONTAINER_MODULE.__name__, container_iname)
    if container_iface is not None:
        assert issubclass(container_iface, IAlchemistContainer)
        log.info(
            "generate_container_class [model=%s] skipping container interface %s.%s for",
            *msg)
    else:
        container_iface = interface.interface.InterfaceClass(
            container_iname,
            bases=base_interfaces,
            __module__=INTERFACE_MODULE.__name__)
        # set on INTERFACE_MODULE, register on type_info
        setattr(INTERFACE_MODULE, container_iname, container_iface)
        # NOTE(review): ti.container_interface appears to be set only on the
        # creation path (not when an existing interface is found) -- confirm
        ti.container_interface = container_iface
        log.info(
            "generate_container_class [model=%s] generated container interface %s.%s",
            *msg)
    # setup security
    for n, d in container_iface.namesAndDescriptions(all=True):
        protectName(container_class, n, "zope.Public")
    # apply implementedBy
    if not container_iface.implementedBy(container_class):
        interface.classImplements(container_class, container_iface)
def _group_url_path(self, group):
    """Build the plural-type-key/obj-key path for group and all ancestors,
    outermost ancestor first.
    """
    segments = []
    group = removeSecurityProxy(group)
    while group:
        # walk up the parent chain, collecting child-most segment first
        segments.append("%s/%s" % (
            naming.plural(naming.polymorphic_identity(type(group))),
            stringKey(group)))
        group = removeSecurityProxy(group.parent_group)
    segments.reverse()
    return "/".join(segments)
def get_message(document, principal_ids):
    """Serialize document to a dict message, ensure it carries a "type" key,
    and attach the notified principal ids.
    """
    message = bungeni.core.serialize.obj2dict(document, 0)
    if not message.get("type", None):
        # prefer the serialized document_type; fall back to the type key
        if "document_type" in message:
            message["type"] = message["document_type"]
        else:
            message["type"] = naming.polymorphic_identity(document.__class__)
    message["principal_ids"] = list(principal_ids)
    return message
def _group_url_path(self, group):
    """Build the plural-type-key/obj-key path for group and all ancestors,
    outermost ancestor first.
    """
    path_segments = []
    current = removeSecurityProxy(group)
    while current:
        segment = "%s/%s" % (
            naming.plural(naming.polymorphic_identity(type(current))),
            stringKey(current))
        # prepend so the outermost ancestor ends up first
        path_segments.insert(0, segment)
        current = removeSecurityProxy(current.parent_group)
    return "/".join(path_segments)
def handle_add_save(self, action, data): """After succesful creation of translation, redirect to the view. """ #url = url.absoluteURL(self.context, self.request) #language = get_language_by_name(data["language"])["name"] session = Session() trusted = removeSecurityProxy(self.context) mapper = sa.orm.object_mapper(trusted) pk = getattr(trusted, mapper.primary_key[0].name) curr_trans_by_name = dict( (ct.field_name, ct) for ct in get_field_translations(self.context, data["language"])) def is_changed(context, field_name, new_field_text): if field_name in curr_trans_by_name: old_field_text = curr_trans_by_name[field_name].field_text else: old_field_text = getattr(context, field_name) return not old_field_text == new_field_text translated_attribute_names = [] for field_name in data.keys(): if field_name == "language": continue if is_changed(self.context, field_name, data[field_name]): translated_attribute_names.append(field_name) if field_name in curr_trans_by_name: translation = curr_trans_by_name[field_name] else: translation = domain.FieldTranslation() translation.object_id = pk translation.object_type = naming.polymorphic_identity( trusted.__class__) translation.field_name = field_name translation.lang = data["language"] session.add(translation) translation.field_text = data[field_name] if translated_attribute_names: session.flush() notify( TranslationCreatedEvent(self.context, data["language"], sorted(translated_attribute_names))) # !+EVENT_DRIVEN_CACHE_INVALIDATION(mr, mar-2011) no translate event # invalidate caches for this domain object type #invalidate_caches_for(trusted.__class__.__name__, "translate") #if not self._next_url: # self._next_url = ( \ # "%s/versions/%s" % (url, stringKey(version)) + \ # "?portal_status_message=Translation added") self._finished_add = True
def add_derived_property_to_model(domain_model, name, derived):
    """Set a read-only derived property `name` on the domain class.

    `derived` identifies a form-derived callable (resolved via
    capi.get_form_derived) used as the property getter.
    Asserts that no attribute of the same name is already defined directly
    on the class, to avoid silently clobbering it.
    """
    # local import to avoid a circular import at module load time
    from bungeni.capi import capi
    # !+ do not allow clobbering of a same-named attribute
    # (was "not name in": PEP 8 prefers the "not in" operator)
    assert name not in domain_model.__dict__, \
        "May not overwrite %r as derived field, a field with same name is " \
        "already defined directly by domain model class for type %r." % (
            name, naming.polymorphic_identity(domain_model))
    # set as property on domain class
    setattr(domain_model, name, property(capi.get_form_derived(derived)))
def catalyse(ti):
    """Called from catalyse_system_descriptors here (for system descriptors)
    AND from ui.descriptor.localization.new_descriptor_cls for custom types.
    """
    type_key = naming.polymorphic_identity(ti.domain_model)
    log.info(" ----- CATALYSE: %s -----", type_key)
    log.debug("ti = %s", ti)
    # run the catalyst pipeline, in order
    for step in (
            generate_table_schema_interface,
            apply_security,
            generate_container_class,
            generate_collection_traversal,
        ):
        step(ti)
    return ti
def localize_domain_model_from_descriptor_class(domain_model, descriptor_cls):
    """Localize the domain model for configuration information in the
    descriptor i.e. any extended/derived attributes.
    For any model/descriptor this should be called only once!

    Mutates domain_model in place (adds properties, instruments mappers);
    the DONE registry guards against double application.
    """
    type_key = naming.polymorphic_identity(domain_model)
    # localize models from descriptors only once!
    assert type_key not in localize_domain_model_from_descriptor_class.DONE, \
        "May not re-localize [%s] domain model from descriptor" % (type_key)
    localize_domain_model_from_descriptor_class.DONE.append(type_key)
    log.info("localize_domain_model_from_descriptor_class: (%s, %s)",
        domain_model.__name__, descriptor_cls.__name__)
    # ensure cls has own dedicated "extended_properties" list property
    # i.e. a "extended_properties" key in own cls.__dict__,
    # and that it is initialized with current (possibly inherited) values
    domain_model.extended_properties = domain_model.extended_properties[:]
    for field in descriptor_cls.fields:
        # extended
        if field.extended is not None:
            add_extended_property_to_model(domain_model, field.name,
                field.extended)
        # derived
        if field.derived is not None:
            add_derived_property_to_model(domain_model, field.name,
                field.derived)
    # !+if domain_model.extended_properties: ?
    instrument_extended_properties(domain_model)
    mapper_add_relation_vertical_properties(domain_model)
    # !+AUDIT_EXTENDED_ATTRIBUTES as audit class was created prior to
    # extended attributes being updated on domain type, need to push onto
    # it any extended attrs that were read from model's descriptor
    if IFeatureAudit.implementedBy(domain_model):
        # either defined manually or created dynamically in feature_audit()
        audit_kls = getattr(MODEL_MODULE, "%sAudit" % (domain_model.__name__))
        # ensure cls has own dedicated "extended_properties" list property
        audit_kls.extended_properties = domain_model.extended_properties[:]
        # propagate any extended attributes on head kls also to its audit_kls
        instrument_extended_properties(audit_kls)
    # containers
    for ic in descriptor_cls.info_containers:
        container_qualname = "bungeni.models.domain.%s" % (
            naming.container_class_name(ic.target_type_key))
        add_container_property_to_model(domain_model, ic.container_attr_name,
            container_qualname, ic.rel_attr_name, ic.indirect_key)
def handle_add_save(self, action, data): """After succesful creation of translation, redirect to the view. """ #url = url.absoluteURL(self.context, self.request) #language = get_language_by_name(data["language"])["name"] session = Session() trusted = removeSecurityProxy(self.context) mapper = sa.orm.object_mapper(trusted) pk = getattr(trusted, mapper.primary_key[0].name) curr_trans_by_name = dict( (ct.field_name, ct) for ct in get_field_translations(self.context, data["language"]) ) def is_changed(context, field_name, new_field_text): if field_name in curr_trans_by_name: old_field_text = curr_trans_by_name[field_name].field_text else: old_field_text = getattr(context, field_name) return not old_field_text == new_field_text translated_attribute_names = [] for field_name in data.keys(): if field_name == "language": continue if is_changed(self.context, field_name, data[field_name]): translated_attribute_names.append(field_name) if field_name in curr_trans_by_name: translation = curr_trans_by_name[field_name] else: translation = domain.FieldTranslation() translation.object_id = pk translation.object_type = naming.polymorphic_identity(trusted.__class__) translation.field_name = field_name translation.lang = data["language"] session.add(translation) translation.field_text = data[field_name] if translated_attribute_names: session.flush() notify(TranslationCreatedEvent(self.context, data["language"], sorted(translated_attribute_names))) # !+EVENT_DRIVEN_CACHE_INVALIDATION(mr, mar-2011) no translate event # invalidate caches for this domain object type #invalidate_caches_for(trusted.__class__.__name__, "translate") #if not self._next_url: # self._next_url = ( \ # "%s/versions/%s" % (url, stringKey(version)) + \ # "?portal_status_message=Translation added") self._finished_add = True
def generate_container_class(ti):
    """Generate a zope3 container class for a domain model.

    Idempotent per container name: if the class (or interface) is already set
    on the target module it is reused and registered on ti instead of being
    re-created. Side effects: sets attributes on CONTAINER_MODULE and
    INTERFACE_MODULE, and sets ti.container_class / ti.container_interface.
    """
    type_key = naming.polymorphic_identity(ti.domain_model)
    container_name = naming.container_class_name(type_key)
    container_iname = naming.container_interface_name(type_key)
    base_interfaces = (IAlchemistContainer,)
    # logging variables
    msg = (ti.domain_model.__name__, CONTAINER_MODULE.__name__, container_name)
    # container class - if we already have one, exit
    if getattr(CONTAINER_MODULE, container_name, None):
        log.info("generate_container_class [model=%s] found container %s.%s, skipping" % msg)
        ti.container_class = getattr(CONTAINER_MODULE, container_name)
        return
    container_class = type(container_name, (AlchemistContainer,),
        dict(_class=ti.domain_model,
            __module__=CONTAINER_MODULE.__name__)
    )
    # set on CONTAINER_MODULE, register on type_info
    setattr(CONTAINER_MODULE, container_name, container_class)
    ti.container_class = container_class
    log.info("generate_container_class [model=%s] generated container %s.%s" % msg)
    # container interface - if we already have one, skip creation
    # !+ should always be newly created?
    container_iface = getattr(INTERFACE_MODULE, container_iname, None)
    msg = (ti.domain_model.__name__, CONTAINER_MODULE.__name__, container_iname)
    if container_iface is not None:
        assert issubclass(container_iface, IAlchemistContainer)
        log.info("generate_container_class [model=%s] skipping container interface %s.%s for" % msg)
    else:
        container_iface = interface.interface.InterfaceClass(
            container_iname,
            bases=base_interfaces,
            __module__=INTERFACE_MODULE.__name__
        )
        # set on INTERFACE_MODULE, register on type_info
        setattr(INTERFACE_MODULE, container_iname, container_iface)
        # NOTE(review): ti.container_interface appears to be set only on the
        # creation path (not when an existing interface is found) -- confirm
        ti.container_interface = container_iface
        log.info("generate_container_class [model=%s] generated container interface %s.%s" % msg)
    # setup security
    for n, d in container_iface.namesAndDescriptions(all=True):
        protectName(container_class, n, "zope.Public")
    # apply implementedBy
    if not container_iface.implementedBy(container_class):
        interface.classImplements(container_class, container_iface)
def new_audit_class(kls):
    """Create, set on MODEL_MODULE, and map {kls}Audit class.
    """
    base_audit_kls = get_base_audit_class(kls)
    audit_kls = base_audit_kls.auditFactory(kls)
    # register the generated audit class on the models module
    setattr(MODEL_MODULE, audit_kls.__name__, audit_kls)
    # map it, inheriting from the base audit mapper and discriminating
    # on the head class's type key
    identity = naming.polymorphic_identity(kls)
    mapper(audit_kls, inherits=base_audit_kls, polymorphic_identity=identity)
    log.info("GENERATED new_audit_class %s(%s) for type %s",
        audit_kls, base_audit_kls, kls)
    return audit_kls
def validate_required_fields(ti):
    """Raise a ValueError if a field is not required in ui but required in db.

    Compares each descriptor field marked not-required against the mapped
    column's nullability; a NOT NULL column with a non-required UI field is
    an inconsistency.
    """
    mapper = orm.class_mapper(ti.domain_model)
    if not mapper:
        return
    type_name = ti.workflow_key or naming.polymorphic_identity(ti.domain_model)
    for field_name, field in ti.descriptor_model.fields_by_name.iteritems():
        # only fields the UI does not require can be inconsistent with the db
        if field.property and not field.property.required:
            column = mapper.columns.get(field_name)
            # "is False" replaces non-idiomatic "== False" (PEP 8 E712);
            # flattened the nested ifs
            if column is not None and column.nullable is False:
                raise ValueError("Descriptor %r field %r is required in "
                    "the db. Must set to be required." % (
                        type_name, field_name))
def get_field_translations(context, lang):
    """Get the FieldTranslation items for context fields in language lang
    NOTE: context may NOT be None
    """
    assert ITranslatable.providedBy(context), "%s %s" % (lang, context)
    trusted = removeSecurityProxy(context)
    type_key = naming.polymorphic_identity(trusted.__class__)
    # single-column primary key identifies the translated object
    pk = getattr(trusted, orm.object_mapper(trusted).primary_key[0].name)
    criteria = sql.and_(
        domain.FieldTranslation.object_type == type_key,
        domain.FieldTranslation.object_id == pk,
        domain.FieldTranslation.lang == lang)
    return Session().query(domain.FieldTranslation).filter(criteria).all()
def new_audit_class(kls):
    """Create, set on MODEL_MODULE, and map {kls}Audit class.
    """
    base_audit_kls = get_base_audit_class(kls)
    audit_kls = base_audit_kls.auditFactory(kls)
    # register the generated audit class on the models module
    setattr(MODEL_MODULE, audit_kls.__name__, audit_kls)
    # map it, inheriting from the base audit mapper and discriminating
    # on the head class's type key
    mapper(audit_kls,
        inherits=base_audit_kls,
        polymorphic_identity=naming.polymorphic_identity(kls))
    log.info("GENERATED new_audit_class %s(%s) for type %s",
        audit_kls, base_audit_kls, kls)
    return audit_kls
def expand_containers(self, items, containers, _url, chain=(), context=None):
    """Append a navigation entry dict to `items` for each (key, container),
    skipping doc containers whose type is not workspaced for the owning
    group; finally sorts items by (order, label) in place.
    """
    #seen_context = False
    _url = _url.rstrip("/")
    current = False
    for key, container in containers:
        assert IAlchemistContainer.providedBy(container)
        # do not include doc containers for docs who do not specifically
        # declare the parent group instance as a workspace.group_name
        if IDoc.implementedBy(container.domain_model):
            group = get_group_for_context(container)
            assert IGroup.providedBy(group)
            doc_type_key = naming.polymorphic_identity(container.domain_model)
            if not group.is_type_workspaced(doc_type_key):
                continue
        label = container.domain_model.__name__
        descriptor = utils.get_descriptor(container.domain_model)
        # fallback sort order for types without a descriptor
        order = 999
        if descriptor:
            order = descriptor.order
            # NOTE(review): if descriptor has neither container_name nor
            # display_name this clobbers the class-name default with None --
            # confirm descriptors always define display_name
            label = getattr(descriptor, "container_name", None) or \
                getattr(descriptor, "display_name", None)
        if context is not None:
            current = container.__name__ == context.__name__
        # selected only when this is the current node at the end of the chain
        selected = not len(chain) and current
        if current:
            #seen_context = True
            nodes = self.expand(chain)
        else:
            nodes = ()
        key_url = "%s/%s" % (_url, key)
        items.append({
            "id": self.get_nav_entry_id(key_url),
            "order": order,
            "label": translate(label,
                target_language=get_default_language(),
                domain="bungeni"),
            "url": key_url,
            "current": current,
            "selected": selected,
            "kind": "container",
            "nodes": nodes,
        })
    items.sort(key=lambda item: (item['order'], item['label']))
def validate_required_fields(ti):
    """Raise a ValueError if a field is not required in ui but required in db.

    Compares each descriptor field marked not-required against the mapped
    column's nullability; a NOT NULL column with a non-required UI field is
    an inconsistency.
    """
    mapper = orm.class_mapper(ti.domain_model)
    if not mapper:
        return
    type_key = ti.workflow_key or naming.polymorphic_identity(ti.domain_model)
    # !+DESCRIPTOR_KEY_DIFF_FIRST_USE descriptor_model would be None if this is
    # the first use of it and its key is different than the type_key
    #if ti.descriptor_model is None:
    for field_name, field in ti.descriptor_model.fields_by_name.iteritems():
        # only fields the UI does not require can be inconsistent with the db
        if field.property and not field.property.required:
            column = mapper.columns.get(field_name)
            # "is False" replaces non-idiomatic "== False" (PEP 8 E712);
            # flattened the nested ifs
            if column is not None and column.nullable is False:
                raise ValueError("Descriptor %r field %r is required in "
                    "the db. Must set to be required." % (type_key, field_name))
def get_field_translations(context, lang):
    """Get the FieldTranslation items for context fields in language lang
    NOTE: context may NOT be None
    """
    assert ITranslatable.providedBy(context), "%s %s" % (lang, context)
    trusted = removeSecurityProxy(context)
    type_key = naming.polymorphic_identity(trusted.__class__)
    mapper = orm.object_mapper(trusted)
    # single-column primary key identifies the translated object
    pk = getattr(trusted, mapper.primary_key[0].name)
    session = Session()
    query = session.query(domain.FieldTranslation)
    query = query.filter(sql.and_(
        domain.FieldTranslation.object_type == type_key,
        domain.FieldTranslation.object_id == pk,
        domain.FieldTranslation.lang == lang))
    return query.all()
def validate_required_fields(ti):
    """Raise a ValueError if a field is not required in ui but required in db.

    Compares each descriptor field marked not-required against the mapped
    column's nullability; a NOT NULL column with a non-required UI field is
    an inconsistency.
    """
    mapper = orm.class_mapper(ti.domain_model)
    if not mapper:
        return
    type_key = ti.workflow_key or naming.polymorphic_identity(ti.domain_model)
    # !+DESCRIPTOR_KEY_DIFF_FIRST_USE descriptor_model would be None if this is
    # the first use of it and its key is different than the type_key
    #if ti.descriptor_model is None:
    for field_name, field in ti.descriptor_model.fields_by_name.iteritems():
        # only fields the UI does not require can be inconsistent with the db
        if field.property and not field.property.required:
            column = mapper.columns.get(field_name)
            # "is False" replaces non-idiomatic "== False" (PEP 8 E712);
            # flattened the nested ifs
            if column is not None and column.nullable is False:
                raise ValueError("Descriptor %r field %r is required in "
                    "the db. Must set to be required." % (
                        type_key, field_name))
def get_available_translations(context):
    """Return a dict of all available translation languages (key) to the
    object_id of the translated object (value).

    Best-effort: returns an empty dict when the context cannot be queried
    (e.g. not ORM-mapped).
    """
    trusted = removeSecurityProxy(context)
    type_key = naming.polymorphic_identity(trusted.__class__)
    try:
        mapper = orm.object_mapper(trusted)
        pk = getattr(trusted, mapper.primary_key[0].name)
        session = Session()
        query = session.query(domain.FieldTranslation).filter(
            sql.and_(domain.FieldTranslation.object_id == pk,
                domain.FieldTranslation.object_type == type_key)
            ).distinct().values("lang", "object_id")
        return dict(query)
    except Exception:
        # was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; keep the deliberate best-effort behavior but
        # only for ordinary exceptions
        # !+ narrow further to the specific expected exception types
        return {}
def get_available_translations(context):
    """Return a dictionary of all available translations: language (key)
    mapped to the object_id of the translated object (value).

    Returns {} for any failure (e.g. unmapped or not-yet-persisted context)
    -- deliberate best-effort behavior.
    """
    trusted = removeSecurityProxy(context)
    type_key = naming.polymorphic_identity(trusted.__class__)
    try:
        mapper = orm.object_mapper(trusted)
        pk = getattr(trusted, mapper.primary_key[0].name)
        session = Session()
        query = session.query(domain.FieldTranslation).filter(
            sql.and_(
                domain.FieldTranslation.object_id == pk,
                domain.FieldTranslation.object_type == type_key)
            ).distinct().values("lang", "object_id")
        return dict(query)
    # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
    # are no longer swallowed; the no-translations fallback is kept
    except Exception:
        return {}
# ("group." + schema.groups.c.type + "." + # rdb.cast(schema.groups.c.group_id, rdb.String) # ).label("group_principal_id") #), "contained_groups": relation(domain.Group, backref=backref("parent_group", remote_side=schema.groups.c.group_id) ), "group_addresses": relation(domain.GroupAddress, # !+HEAD_DOCUMENT_ITEM(mr, sep-2011) standardize name backref=backref("head", remote_side=schema.groups.c.group_id) ), # "keywords": relation(domain.Keyword, secondary=schema.groups_keywords) }, polymorphic_on=schema.groups.c.type, polymorphic_identity=polymorphic_identity(domain.Group) ) # Keywords for groups #mapper(domain.Keyword, schema.keywords, # properties = { # "groups": relation(domain.Group, # secondary=schema.groups_keywords, backref="keywords" # ), # } #) # delegate rights to act on behalf of a user mapper(domain.UserDelegation, schema.user_delegations, properties={ "user": relation(domain.User,
def name(cls):
    """Return the naming type key for cls, e.g. SomeDoc -> "some_doc".

    NOTE(review): takes the class as first parameter -- presumably declared
    as a classmethod/classproperty at the definition site; confirm decorator.
    """
    return naming.polymorphic_identity(cls)
def configurable_mappings(kls):
    """Configuration mappings for declarative-model types.

    Sets up SQLAlchemy mapper properties on kls as per the features it
    declares -- currently audit ("changes" relation) and version (no-op here).
    """
    name = kls.__name__
    mapper_add_relation_vertical_properties(kls)
    # auditable, determine properties, map audit class/table
    if interfaces.IFeatureAudit.implementedBy(kls):
        # either defined manually or created dynamically in domain.feature_audit()
        audit_kls = getattr(domain, "%sAudit" % (name))
        # assumption: audit_kls only uses single inheritance (at least for
        # those created dynamically in domain.feature_audit())
        base_audit_kls = audit_kls.__bases__[0]
        assert issubclass(base_audit_kls, domain.Audit), \
            "Audit class %s is not a proper subclass of %s" % (
                audit_kls, domain.Audit)
        # mapper for the audit_cls for this kls, if it was created dynamically
        if kls in domain.feature_audit.CREATED_AUDIT_CLASS_FOR:
            mapper(audit_kls,
                inherits=base_audit_kls,
                polymorphic_identity=polymorphic_identity(kls)
            )
        # propagate any extended attributes on head kls also to its audit_kls
        for vp_name, vp_type in kls.extended_properties:
            mapper_add_relation_vertical_property(audit_kls, vp_name, vp_type)
    # add any properties to the head kls itself
    def mapper_add_configurable_properties(kls):
        kls_mapper = class_mapper(kls)
        def configurable_properties(kls, mapper_properties):
            """Add properties, as per configured features for a domain type.
            """
            # auditable
            if interfaces.IFeatureAudit.implementedBy(kls):
                # kls.changes <-> change.audit.audit_head=doc:
                # doc[@TYPE] <-- TYPE_audit <-> audit <-> change
                # get head table for kls, and its audit table.
                tbl = kls_mapper.mapped_table
                audit_tbl = getattr(schema, "%s_audit" % (tbl.name))
                # get tbl PK column
                assert len(tbl.primary_key) == 1 # !+ASSUMPTION_SINGLE_COLUMN_PK(mr, may-2012)
                pk_col = [ c for c in tbl.primary_key ][0]
                # many-to-many via the audit table: head PK -> audit rows ->
                # change rows, newest change first
                mapper_properties["changes"] = relation(domain.Change,
                    primaryjoin=rdb.and_(
                        pk_col == audit_tbl.c.get(pk_col.name),
                    ),
                    secondary=audit_tbl,
                    secondaryjoin=rdb.and_(
                        audit_tbl.c.audit_id == schema.change.c.audit_id,
                    ),
                    lazy=True,
                    order_by=schema.change.c.audit_id.desc(),
                    cascade="all",
                    passive_deletes=False, # SA default
                )
            # versionable
            if interfaces.IFeatureVersion.implementedBy(kls):
                pass
            return mapper_properties
        for key, prop in configurable_properties(kls, {}).items():
            kls_mapper.add_property(key, prop)
    mapper_add_configurable_properties(kls)
def create_id(event):
    """Create an event (sitting or session) identifier of the form <type>-<id>
    """
    type_key = naming.polymorphic_identity(event.__class__)
    # single-column primary key value of the mapped instance
    pk_value = orm.object_mapper(event).primary_key_from_instance(event)[0]
    return "%s-%d" % (type_key, pk_value)
def localize_descriptor(descriptor_elem, is_init, scope="system"):
    """Localize descriptor from descriptor XML element.
    Return the created/modified descriptor class.

    descriptor_elem: parsed XML <descriptor> element
    is_init: whether this is the initial localization pass (logging only here)
    scope: "custom" types may create a brand new descriptor class; all other
        scopes only update an existing one.
    """
    type_key = xas(descriptor_elem, "name")
    ti = capi.get_type_info(type_key)
    # !+ ensure domain_model has already been set
    assert ti.domain_model, type_key
    order = xai(descriptor_elem, "order")
    fields = new_descriptor_fields(descriptor_elem)
    info_containers = [ parse_container(c_elem)
        for c_elem in descriptor_elem.findall("container") ]
    integrity = descriptor_elem.find("integrity")
    if integrity is not None:
        constraints = [ capi.get_form_constraint(c)
            for c in xas(integrity, "constraints", "").split() ]
        validations = [ capi.get_form_validator(v)
            for v in xas(integrity, "validations", "").split() ]
    else:
        constraints, validations = (), ()
    domain_model = ti.domain_model
    if scope == "custom":
        try:
            cls = update_descriptor_cls(type_key, order,
                fields, info_containers, constraints, validations)
        except AttributeError:
            # first time around, no such descriptor - so create a new custom descriptor
            archetype_key = naming.polymorphic_identity(ti.archetype)
            cls = new_descriptor_cls(type_key, archetype_key, order,
                fields, info_containers, constraints, validations)
            # only "push" onto cls (hiding same-named properties or overriding
            # inherited setting) if set in the descriptor AND only on cls creation:
            if xas(descriptor_elem, "label"):
                cls.display_name = xas(descriptor_elem, "label")
            if xas(descriptor_elem, "container_label"):
                cls.container_name = xas(descriptor_elem, "container_label")
            if xas(descriptor_elem, "sort_on"):
                cls.sort_on = xas(descriptor_elem, "sort_on").split()
                # !+ assert each name is a field in the descriptor
            if xas(descriptor_elem, "sort_dir"):
                # default cls.sort_dir: "desc"
                cls.sort_dir = xas(descriptor_elem, "sort_dir")
            # register labels for i18n message extraction
            naming.MSGIDS.add(cls.display_name)
            naming.MSGIDS.add(cls.container_name)
            # this is guarenteed to execute maximum once per type_key
            alchemist.model.localize_domain_model_from_descriptor_class(
                domain_model, cls)
            #!+CATALYSE_SYSTEM_DESCRIPTORS -- all custom types are catalysed here!
            alchemist.catalyst.catalyse(ti)
    else: # non-custom
        cls = update_descriptor_cls(type_key, order,
            fields, info_containers, constraints, validations)
        # ensures that this executes a maximum once per type_key
        if type_key in alchemist.model.localize_domain_model_from_descriptor_class.DONE:
            log.warn(
                "Ignoring attempt to re-localize model [scope=%r] "
                "from descriptor for type %r", scope, type_key)
        else:
            alchemist.model.localize_domain_model_from_descriptor_class(
                domain_model, cls)
            #!+CATALYSE_SYSTEM_DESCRIPTORS -- all non-custom types have already
            # catalysed on import of ui.descriptor, and may not "catalyse twice"
            # so just working around it by "calling" less of
            # alchemist.catalyst.catalyse(ti)
            # Make ui.descriptor.catalyse_system_descriptors to be more selective,
            # and then catalyse remaining support types here?
            #alchemist.catalyst.catalyse(ti)
            #!+re-apply_security breaks edit event view (fields shown in view mode!)
            #alchemist.catalyst.apply_security(ti)
            alchemist.catalyst.generate_collection_traversal(ti)
    log.debug("Localized [init=%s] descriptor [%s] %s", is_init, type_key, ti)
    return cls
inherits=domain.Change, polymorphic_on=schema.change.c.action, # polymorphic discriminator polymorphic_identity=polymorphic_identity(domain.Version), #!+only concrete {type}_audit record are created ) # !+polymorphic_identity_multi only allows a single value... e.g. if needed to # add a 2nd value such as "reversion" would not be able to -- but seems we # should be able to tweak the version mapper's polymorphic_map to allow # multiple values for polymorphic_identity (but does not work anyway # attachment.versions does not pick up reversions): #vm.polymorphic_map["reversion"] = vm.polymorphic_map["version"] #del vm ''' mapper(domain.Audit, schema.audit, polymorphic_on=schema.audit.c.audit_type, # polymorphic discriminator polymorphic_identity=polymorphic_identity(domain.Audit) ) # ARCHETYPES # doc mapper(domain.Doc, schema.doc, polymorphic_on=schema.doc.c.type, # polymorphic discriminator polymorphic_identity=polymorphic_identity(domain.Doc), properties={ #"owner": relation(domain.User, # primaryjoin=rdb.and_(schema.doc.c.owner_id == schema.user.c.user_id), # uselist=False, # lazy=False), # !+PrincipalRoleMap replacing above implementation of "owner" property
def apply_security(ti):
    """Apply zope security (per-attribute view/edit permissions) onto
    ti.domain_model, as determined by its descriptor and scope.
    """
    domain_model, descriptor_model = ti.domain_model, ti.descriptor_model
    type_key = naming.polymorphic_identity(domain_model)
    log.debug("APPLY SECURITY: %s %s", type_key, domain_model)
    # first, "inherit" security settings of super classes i.e. equivalent of
    # something like <require like_class=".domain.Doc" />
    for c in domain_model.__bases__:
        if c is object:
            continue
        log.debug(" LIKE_CLASS: %s", c)
        protectLikeUnto(domain_model, c)
    # !+DECL permissions here--for CUSTOM types only, and SINCE r9946--override
    # what is defined in domain.zcml, as opposed to vice-versa (probably
    # because CUSTOM types are setup at a later stage).
    # So (for CUSTOM types only?) we use the parametrized
    # bungeni.{type_key}.{Mode} as the view/edit permission:
    pv_type = "zope.Public" # view permission, for type
    pe_type = "zope.Public" # edit permission, for type
    if descriptor_model.scope == "custom":
        pv_type = "bungeni.%s.View" % (type_key)
        pe_type = "bungeni.%s.Edit" % (type_key)
    # !+SCHEMA_FIELDS(mr, oct-2012) all this seems superfluous anyway, as is
    # (always?) overwritten further down? Switch to base loop on superset of
    # names (dir(cls)?) and then decide ONCE on various criteria how to
    # protect the name.
    _view_protected = set() # remember names protected for view
    _edit_protected = set() # remember names protected for edit
    # sorted (for clearer logging) list of attr names that are BOTH defined
    # by the db mapped-table schema AND have a dedicated UI Field.
    dts_attrs = [ n for n in ti.derived_table_schema.names(all=True) ]
    df_attrs = [ f.get("name") for f in descriptor_model.fields ]
    attrs = sorted(set(dts_attrs).union(set(df_attrs)))
    log.debug(" DTS+Fields: %s, %s",
        ti.derived_table_schema.__name__, descriptor_model.__name__)
    for n in attrs:
        # !+DECL special cases, do not override domain.zcml...
        if n in ("response_text",):
            continue
        _view_protected.add(n); _edit_protected.add(n)
        pv = pv_type
        pe = pe_type
        model_field = descriptor_model.get(n)
        if model_field:
            if descriptor_model.scope != "custom":
                # !+DECL proceed as before for now
                pv = model_field.view_permission # always "zope.Public"
                pe = model_field.edit_permission # always "zope.ManageContent"
        # !+DECL parametrize all permissions by type AND mode, ensure to grant
        # to appropriate roles. What about non-workflows or non-catalyzed types?
        protectName(domain_model, n, pv)
        protectSetAttribute(domain_model, n, pe)
        DTS = n in dts_attrs and "dts" or " "
        DF = n in df_attrs and "df" or " "
        log.debug(" %s %s [%s] view:%s edit:%s %x",
            DTS, DF, n, pv, pe, id(model_field))
        if n not in domain_model.__dict__:
            log.debug(" ---- [%s] !+SCHEMA_FIELDS not in %s.__dict__",
                n, domain_model)
    # container attributes (never a UI Field for these)
    log.debug(" __dict__: %s" % (domain_model))
    for k in sorted(domain_model.__dict__.keys()):
        # !+ if IManagedContainer.providedBy(v): ?
        v = domain_model.__dict__[k]
        if isinstance(v, ManagedContainerDescriptor):
            # NOTE(review): the RESETTING debug fires when k was already
            # protected above; the add below is unconditional -- confirm
            if k in _view_protected:
                log.debug(" ---- %s RESETTING...", k)
            _view_protected.add(k)
            log.debug(" managed %s view:%s" % (k, "zope.Public"))
        elif isinstance(v, orm.attributes.InstrumentedAttribute):
            if k in _view_protected:
                log.debug(" ---- %s RESETTING...", k)
            _view_protected.add(k)
            log.debug(" instrumented [%s] view:%s", k, "zope.Public")
        else:
            log.debug(" ---- [%s] !+SCHEMA_FIELD IN __dict__ but NOT "
                "instrumented OR managed", k)
            continue
        if k not in attrs:
            log.debug(" ---- [%s] !+SCHEMA_FIELDS not in attrs", k)
        protectName(domain_model, k, "zope.Public") #!+pv_type
    # Dump permission_id required to getattr/setattr for "custom" types.
    # We only dump the security settings for "custom" types as it only these
    # are processed AFTER that domain.zcml has been executed (for other types,
    # loaded earlier during app startup, it is the settings in domain.zcml
    # (executed later during app startup) that ends up applying.
    if descriptor_model.scope == "custom":
        from zope.security import proxy, checker
        dmc = checker.getChecker(proxy.ProxyFactory(domain_model()))
        log.debug(" checker: %s", dmc)
        for n in sorted(_view_protected.union(["response_text"])):
            g = dmc.get_permissions.get(n)
            s = dmc.set_permissions.get(n) #dmc.setattr_permission_id(n)
            log.debug(" [%s] get:%s set:%s", n, getattr(g, "__name__", g), s)
def configurable_mappings(kls):
    """Configuration mappings for declarative-model types.

    Wires SQLAlchemy mapper properties onto kls according to its declared
    features (audit trail, versioning).
    """
    name = kls.__name__
    mapper_add_relation_vertical_properties(kls)
    # auditable, determine properties, map audit class/table
    if interfaces.IFeatureAudit.implementedBy(kls):
        # either defined manually or created dynamically in domain.feature_audit()
        audit_kls = getattr(domain, "%sAudit" % (name))
        # assumption: audit_kls only uses single inheritance (at least for
        # those created dynamically in domain.feature_audit())
        base_audit_kls = audit_kls.__bases__[0]
        assert issubclass(base_audit_kls, domain.Audit), \
            "Audit class %s is not a proper subclass of %s" % (
                audit_kls, domain.Audit)
        # mapper for the audit_cls for this kls, if it was created dynamically
        if kls in domain.feature_audit.CREATED_AUDIT_CLASS_FOR:
            mapper(audit_kls,
                inherits=base_audit_kls,
                polymorphic_identity=polymorphic_identity(kls))
        # propagate any extended attributes on head kls also to its audit_kls
        for vp_name, vp_type in kls.extended_properties:
            mapper_add_relation_vertical_property(audit_kls, vp_name, vp_type)
    # add any properties to the head kls itself
    def mapper_add_configurable_properties(kls):
        kls_mapper = class_mapper(kls)
        def configurable_properties(kls, mapper_properties):
            """Add properties, as per configured features for a domain type.
            """
            # auditable
            if interfaces.IFeatureAudit.implementedBy(kls):
                # kls.changes <-> change.audit.audit_head=doc:
                # doc[@TYPE] <-- TYPE_audit <-> audit <-> change
                # get head table for kls, and its audit table.
                tbl = kls_mapper.mapped_table
                audit_tbl = getattr(schema, "%s_audit" % (tbl.name))
                # get tbl PK column
                assert len(tbl.primary_key) == 1 # !+ASSUMPTION_SINGLE_COLUMN_PK(mr, may-2012)
                pk_col = [c for c in tbl.primary_key][0]
                # head PK -> audit rows -> change rows, newest change first
                mapper_properties["changes"] = relation(
                    domain.Change,
                    primaryjoin=rdb.and_(
                        pk_col == audit_tbl.c.get(pk_col.name),
                    ),
                    secondary=audit_tbl,
                    secondaryjoin=rdb.and_(
                        audit_tbl.c.audit_id == schema.change.c.audit_id,
                    ),
                    lazy=True,
                    order_by=schema.change.c.audit_id.desc(),
                    cascade="all",
                    passive_deletes=False, # SA default
                )
            # versionable
            if interfaces.IFeatureVersion.implementedBy(kls):
                pass
            return mapper_properties
        for key, prop in configurable_properties(kls, {}).items():
            kls_mapper.add_property(key, prop)
    mapper_add_configurable_properties(kls)
def create_id(event):
    """Create an event (sitting or session) identifier of the form <type>-<id>
    """
    # primary key value of the persisted event instance
    pk = orm.object_mapper(event).primary_key_from_instance(event)[0]
    return "%s-%d" % (naming.polymorphic_identity(event.__class__), pk)
mapper(
    domain.Principal,
    schema.principal,
    # principal should only be created as user or group
    #polymorphic_identity=polymorphic_identity(domain.Principal),
    polymorphic_on=schema.principal.c.type, # polymorphic discriminator
    properties={})

# Users
# general representation of a person
mapper(
    domain.User,
    schema.user,
    inherits=domain.Principal,
    polymorphic_identity=polymorphic_identity(domain.User),
    properties={
        # !+ADDRESS naming, use addresses
        "user_addresses": relation(
            domain.UserAddress,
            # !+HEAD_DOCUMENT_ITEM(mr, sep-2011) standardize name
            backref=backref("head",
                remote_side=schema.principal.c.principal_id)),
        "subscriptions": relation(domain.Doc,
            secondary=schema.user_doc),
    })

mapper(domain.UserSubscription, schema.user_doc)

# NOTE(review): chunk is cut here -- the mapper(domain.AdminUser, ...) call
# continues past this view
mapper(domain.AdminUser,
def localize_descriptor(type_key, descriptor_elem, scope="system"):
    """Localize descriptor from descriptor XML element.
    Return the created/modified descriptor class.

    type_key: the type key for the descriptor to localize
    descriptor_elem: parsed XML <descriptor> element
    scope: "custom" types may create a brand new descriptor class; all other
        scopes only update an existing one.
    """
    ti = capi.get_type_info(type_key)
    # !+ ensure domain_model has already been set
    assert ti.domain_model, type_key
    order = xai(descriptor_elem, "order")
    fields = new_descriptor_fields(descriptor_elem)
    info_containers = []
    for c_elem in descriptor_elem.findall("container"):
        # !+ @view_title:i18n_key, @view_id:token, @weight:int ?
        target_type_key, rel_attr_name = xas(c_elem, "match").split(".", 1)
        container_attr_name = xas(c_elem, "name") or naming.plural(target_type_key)
        indirect_key = xas(c_elem, "indirect_key")
        viewlet = xab(c_elem, "viewlet", False)
        add_info_container(type_key, info_containers, container_attr_name,
            target_type_key, rel_attr_name, indirect_key, viewlet=viewlet,
            _origin="container")
    integrity = descriptor_elem.find("integrity")
    if integrity is not None:
        constraints = [ capi.get_form_constraint(c)
            for c in xas(integrity, "constraints", "").split() ]
        validations = [ capi.get_form_validator(v)
            for v in xas(integrity, "validations", "").split() ]
    else:
        constraints, validations = (), ()
    if scope=="custom":
        try:
            cls = update_descriptor_cls(type_key, order,
                fields, info_containers, constraints, validations)
        except AttributeError:
            # first time around, no such descriptor - so create a new custom descriptor
            archetype_key = naming.polymorphic_identity(ti.archetype)
            cls = new_descriptor_cls(type_key, archetype_key, order,
                fields, info_containers, constraints, validations)
            # NOTE(review): sort settings applied only on cls creation --
            # confirm nesting against original (collapsed whitespace)
            if xas(descriptor_elem, "sort_on"):
                cls.sort_on = xas(descriptor_elem, "sort_on").split()
                # !+ assert each name is a field in the descriptor
            if xas(descriptor_elem, "sort_dir"):
                # default cls.sort_dir: "desc"
                cls.sort_dir = xas(descriptor_elem, "sort_dir")
            update_new_descriptor_cls_from_ti(ti)
    else: # non-custom
        cls = update_descriptor_cls(type_key, order,
            fields, info_containers, constraints, validations)
    # finish model/descriptor setup from feature configuration
    if ti.workflow:
        for feature in ti.workflow.features:
            feature.setup_ui(ti.domain_model)
    # custom container order - re-sort info_containers such that "container"
    # ones precede all "feature" ones, plus make the list immutable i.e. no
    # further changes allowed.
    ics = ti.descriptor_model.info_containers
    ti.descriptor_model.info_containers = tuple(sorted(ics,
        # False sorts before True (as 0 sorts before 1)
        key=lambda ic: ic._origin == "feature"
    ))
    log.debug("Localized descriptor [%s] %s", type_key, ti)
    return cls
from bungeni.utils.naming import polymorphic_identity

mapper(domain.Principal, schema.principal,
    # principal should only be created as user or group
    #polymorphic_identity=polymorphic_identity(domain.Principal),
    polymorphic_on=schema.principal.c.type, # polymorphic discriminator
    properties={}
)

# Users
# general representation of a person
mapper(domain.User, schema.user,
    inherits=domain.Principal,
    polymorphic_identity=polymorphic_identity(domain.User),
    properties={
        # !+ADDRESS naming, use addresses
        "user_addresses": relation(domain.UserAddress,
            # !+HEAD_DOCUMENT_ITEM(mr, sep-2011) standardize name
            backref=backref("head",
                remote_side=schema.principal.c.principal_id)
        ),
        "subscriptions": relation(domain.Doc,
            secondary=schema.user_doc
        ),
    }
)
mapper(domain.UserSubscription, schema.user_doc)

# NOTE(review): chunk is cut here -- the mapper(domain.AdminUser, ...) call
# continues past this view
mapper(domain.AdminUser, schema.admin_user,
# rdb.cast(schema.groups.c.group_id, rdb.String) # ).label("group_principal_id") #), "contained_groups": relation(domain.Group, backref=backref("parent_group", remote_side=schema.groups.c.group_id)), "group_addresses": relation( domain.GroupAddress, # !+HEAD_DOCUMENT_ITEM(mr, sep-2011) standardize name backref=backref("head", remote_side=schema.groups.c.group_id)), # "keywords": relation(domain.Keyword, secondary=schema.groups_keywords) }, polymorphic_on=schema.groups.c.type, polymorphic_identity=polymorphic_identity(domain.Group)) # Keywords for groups #mapper(domain.Keyword, schema.keywords, # properties = { # "groups": relation(domain.Group, # secondary=schema.groups_keywords, backref="keywords" # ), # } #) # delegate rights to act on behalf of a user mapper( domain.UserDelegation, schema.user_delegations, properties={
def localize_descriptor(descriptor_elem, is_init, scope="system"):
    """Localize descriptor from descriptor XML element.
    Return the created/modified descriptor class.

    descriptor_elem: parsed XML <descriptor> element
    is_init: whether this is the initial localization pass (logging only here)
    scope: "custom" types may create a brand new descriptor class; all other
        scopes only update an existing one.
    """
    type_key = xas(descriptor_elem, "name")
    ti = capi.get_type_info(type_key)
    # !+ ensure domain_model has already been set
    assert ti.domain_model, type_key
    order = xai(descriptor_elem, "order")
    fields = new_descriptor_fields(descriptor_elem)
    info_containers = [ parse_container(c_elem)
        for c_elem in descriptor_elem.findall("container") ]
    integrity = descriptor_elem.find("integrity")
    if integrity is not None:
        constraints = [ capi.get_form_constraint(c)
            for c in xas(integrity, "constraints", "").split() ]
        validations = [ capi.get_form_validator(v)
            for v in xas(integrity, "validations", "").split() ]
    else:
        constraints, validations = (), ()
    domain_model = ti.domain_model
    if scope=="custom":
        try:
            cls = update_descriptor_cls(type_key, order,
                fields, info_containers, constraints, validations)
        except AttributeError:
            # first time around, no such descriptor - so create a new custom descriptor
            archetype_key = naming.polymorphic_identity(ti.archetype)
            cls = new_descriptor_cls(type_key, archetype_key, order,
                fields, info_containers, constraints, validations)
            # only "push" onto cls (hiding same-named properties or overriding
            # inherited setting) if set in the descriptor AND only on cls creation:
            if xas(descriptor_elem, "label"):
                cls.display_name = xas(descriptor_elem, "label")
            if xas(descriptor_elem, "container_label"):
                cls.container_name = xas(descriptor_elem, "container_label")
            if xas(descriptor_elem, "sort_on"):
                cls.sort_on = xas(descriptor_elem, "sort_on").split()
                # !+ assert each name is a field in the descriptor
            if xas(descriptor_elem, "sort_dir"):
                # default cls.sort_dir: "desc"
                cls.sort_dir = xas(descriptor_elem, "sort_dir")
            # register labels for i18n message extraction
            naming.MSGIDS.add(cls.display_name)
            naming.MSGIDS.add(cls.container_name)
            # this is guarenteed to execute maximum once per type_key
            alchemist.model.localize_domain_model_from_descriptor_class(domain_model, cls)
            #!+CATALYSE_SYSTEM_DESCRIPTORS -- all custom types are catalysed here!
            alchemist.catalyst.catalyse(ti)
    else: # non-custom
        cls = update_descriptor_cls(type_key, order,
            fields, info_containers, constraints, validations)
        # ensures that this executes a maximum once per type_key
        if type_key in alchemist.model.localize_domain_model_from_descriptor_class.DONE:
            log.warn("Ignoring attempt to re-localize model [scope=%r] "
                "from descriptor for type %r", scope, type_key)
        else:
            alchemist.model.localize_domain_model_from_descriptor_class(domain_model, cls)
            #!+CATALYSE_SYSTEM_DESCRIPTORS -- all non-custom types have already
            # catalysed on import of ui.descriptor, and may not "catalyse twice"
            # so just working around it by "calling" less of
            # alchemist.catalyst.catalyse(ti)
            # Make ui.descriptor.catalyse_system_descriptors to be more selective,
            # and then catalyse remaining support types here?
            #alchemist.catalyst.catalyse(ti)
            #!+re-apply_security breaks edit event view (fields shown in view mode!)
            #alchemist.catalyst.apply_security(ti)
            alchemist.catalyst.generate_collection_traversal(ti)
    log.debug("Localized [init=%s] descriptor [%s] %s", is_init, type_key, ti)
    return cls