def new_descriptor_cls(type_key, archetype_key, order, fields, info_containers,
        constraints, validations
    ):
    """Build, register and return a new custom descriptor class.

    The new class subclasses the (localizable) descriptor of the given
    archetype, is published on DESCRIPTOR_MODULE, and is bound as the
    descriptor_model of the type's TI.
    """
    base_ti = capi.get_type_info(archetype_key)
    base_descriptor_key = base_ti.descriptor_key or base_ti.type_key
    base_cls = get_localizable_descriptor_class(
        DESCRIPTOR_MODULE, base_descriptor_key)
    assert base_cls.scope in ("archetype", "custom"), \
        "Custom descriptor %r specifies an invalid archetype %r" % (
            type_key, archetype_key)
    cls_name = naming.descriptor_class_name(type_key)
    attrs = {
        "scope": "custom",
        "__module__": DESCRIPTOR_MODULE.__name__,
        "order": order,
        "fields": fields,
        "info_containers": info_containers,
        "schema_invariants": constraints,
        "custom_validators": validations,
        "default_field_order": [f.name for f in fields],
    }
    cls = type(cls_name, (base_cls,), attrs)
    # publish on DESCRIPTOR_MODULE, and register as the ti.descriptor_model
    setattr(DESCRIPTOR_MODULE, cls_name, cls)
    capi.get_type_info(type_key).descriptor_model = cls
    log.info("generated descriptor [type=%s] %s.%s",
        type_key, DESCRIPTOR_MODULE.__name__, cls_name)
    return cls
def batch_serialize(type_key="*"): """Serialize all objects of `type_key` or all types if with a wildcard(*) as the type key. """ #keep count of serialized objects for feedback serialized_count = 0 #list of domain classes to be serialized domain_models = [] if type_key == "*": types_vocab = zope.component.getUtility( schema.interfaces.IVocabularyFactory, "serializable_type") for term in types_vocab(): if term.value == "*": continue info = capi.get_type_info(term.value) domain_models.append(info.domain_model) else: info = capi.get_type_info(type_key) if info.workflow: domain_models.append(info.domain_model) session = Session() for domain_model in domain_models: objects = session.query(domain_model).all() map(queue_object_serialization, objects) serialized_count += len(objects) return serialized_count
def make_workspace_url(obj_id, type_name, status, context, chamber_id):
    """Return a relative workspace URL for the identified document, or None
    if the type is not workspaced or no tab matches the user's role/status.
    """
    if obj_id and type_name and status:
        # look up the TI once and reuse it -- the original performed a second,
        # redundant capi.get_type_info(domain_class) registry lookup
        ti = capi.get_type_info(type_name)
        domain_class = ti.domain_model
        if model_ifaces.IFeatureWorkspace.implementedBy(domain_class):
            ws_roles = common.get_workspace_roles()
            tabs_config = zope.component.getUtility(IWorkspaceTabsUtility)
            # use the user's first workspace role to pick the tab
            tab = tabs_config.get_tab(ws_roles[0], domain_class, status)
            if tab:
                return "./my-documents/%s/%s-%s" % (tab, ti.workflow_key, obj_id)
def make_workspace_url(obj_id, type_name, status, context, chamber_id):
    """Return a relative workspace URL for the identified document, or None.
    Guard-clause formulation: bail out as soon as any precondition fails.
    """
    if not (obj_id and type_name and status):
        return
    domain_class = capi.get_type_info(type_name).domain_model
    if not model_ifaces.IFeatureWorkspace.implementedBy(domain_class):
        return
    ws_roles = common.get_workspace_roles()
    tabs_config = zope.component.getUtility(IWorkspaceTabsUtility)
    ti = capi.get_type_info(domain_class)
    tab = tabs_config.get_tab(ws_roles[0], domain_class, status)
    if not tab:
        return
    return "./my-documents/%s/%s-%s" % (tab, ti.workflow_key, obj_id)
def batch_serialize(type_key="*", start_date=None, end_date=None):
    """Serialize all objects of `type_key` or all types if with a
    wildcard(*) as the type key.
    Item set may be filtered by status date (start_date and/or end date) range.

    Returns the number of objects queued for serialization.
    """
    # keep count of serialized objects for feedback
    serialized_count = 0
    # list of domain classes to be serialized
    domain_models = []
    if type_key == "*":
        types_vocab = get_vocabulary("serializable_type")
        # we add the legislature and the chamber first
        for term in types_vocab(None):
            if term.value in ("legislature", "chamber"):
                info = capi.get_type_info(term.value)
                domain_models.append(info.domain_model)
        # we add the rest now
        for term in types_vocab(None):
            if term.value == "*":
                continue
            if term.value not in ("legislature", "chamber"):
                info = capi.get_type_info(term.value)
                domain_models.append(info.domain_model)
    else:
        info = capi.get_type_info(type_key)
        # only workflowed types are serializable
        if info.workflow:
            domain_models.append(info.domain_model)
    session = Session()
    for domain_model in domain_models:
        query = session.query(domain_model)
        # restrict by status_date range only for workflowed types
        if IWorkflowed.implementedBy(domain_model) and (start_date or end_date):
            column = domain_model.status_date
            if start_date and end_date:
                expression = sql.between(column, start_date, end_date)
            elif start_date:
                expression = (column >= start_date)
            elif end_date:
                expression = (column <= end_date)
            query = query.filter(expression)
        objects = query.all()
        # !+FILTER(ah, 2014-09-19) adding a filter here - sometimes there is a
        # mismatch between the count shown on the screen i.e. X items sent for
        # serialization and only X-n items appear in the queue - there seem to
        # be empty objects returned sometimes, so eliminating those
        objects = filter(None, objects)
        map(queue_object_serialization, objects)
        # !+FIX(review) was log.error -- this is informational count feedback,
        # not an error condition
        log.info(" COUNTING_TYPES_SERIALIZED -- %s COUNT -- %s",
            domain_model, len(objects))
        serialized_count += len(objects)
    return serialized_count
def get_scheduled_states(type_key):
    """Return the scheduled state ids for the type, or [] if the type's
    domain model has no ISchedulingManager adapter.
    """
    ti = capi.get_type_info(type_key)
    # adapt a single throwaway instance once -- the original instantiated the
    # domain model and performed the adapter lookup a second time
    manager = ISchedulingManager(ti.domain_model(), None)
    if manager:
        return manager.scheduled_states
    return []
def group_items(self):
    """Prepare and remember item aggregations, for convenient access from
    within templates.
    Grouping aggregations are keyed on the *pluralized* name of:
    a) the item's type key
    b) (if different to type key) its custom archetype key
    c) (if different to custom archetype key) its system archetype key.
    Example, an item of type "assembly_question" (that has custom archetype
    "question", and system archetype "doc") will be included in the following
    aggregation list attributes on this instance:
    a) es.assembly_questions b) es.questions c) es.docs
    """
    for scheduled in self.sitting.item_schedule:
        type_key = scheduled.item.type
        ti = capi.get_type_info(type_key)
        # collect distinct grouping keys: type key, then custom/sys archetypes
        grouping_type_keys = [type_key]
        for tk in (ti.custom_archetype_key, ti.sys_archetype_key):
            if tk is not None and tk not in grouping_type_keys:
                grouping_type_keys.append(tk)
        # add the item to each (pluralized) grouping it belongs to
        for tk in grouping_type_keys:
            grouping_name = naming.plural(tk)
            log.debug(
                "[Reports] Adding %s to grouping %r in expanded sitting %s",
                scheduled, grouping_name, self)
            self.grouped.setdefault(grouping_name, []).append(scheduled.item)
def localize_domain_model_from_descriptor_class(domain_model, descriptor_cls):
    """Localize the domain model for configuration information in the descriptor
    i.e. any extended/derived attributes.
    For any model/descriptor this should be called only once!
    """
    type_key = naming.polymorphic_identity(domain_model)
    # localize models from descriptors only once!
    assert type_key not in localize_domain_model_from_descriptor_class.DONE, \
        "May not re-localize [%s] domain model from descriptor" % (type_key)
    localize_domain_model_from_descriptor_class.DONE.append(type_key)
    #!+GET_ARCHETYPE
    #!+archetype_key = naming.polymorphic_identity(domain_model.__bases__[0])
    #   multiple inheritance...
    # derive the archetype key from the name of the descriptor's base class
    archetype_key = naming._type_key_from_descriptor_class_name(
        descriptor_cls.__bases__[0].__name__)
    for field in descriptor_cls.fields:
        # extended: attribute stored in an extended-properties table
        if field.extended is not None:
            add_extended_property_to_model(domain_model, field.name,
                field.extended, archetype_key)
        # derived: read-only attribute computed from other attributes
        if field.derived is not None:
            add_derived_property_to_model(domain_model, field.name,
                field.derived)
    # !+if domain_model.extended_properties: ?
    # !+instrument_extended_properties, archetype_key => table...
    instrument_extended_properties(domain_model, archetype_key)
    mapper_add_relation_vertical_properties(domain_model)
    # !+AUDIT_EXTENDED_ATTRIBUTES as audit class was created prior to
    # extended attributes being updated on domain type, need to push onto
    # it any extended attrs that were read from model's descriptor
    if interfaces.IFeatureAudit.implementedBy(domain_model):
        # either defined manually or created dynamically in feature_audit()
        audit_kls = getattr(MODEL_MODULE, "%sAudit" % (domain_model.__name__))
        # propagate any extended attributes on head kls also to its audit_kls
        import bungeni.models.domain
        audit_table_name = bungeni.models.domain.get_audit_table_name(domain_model)
        instrument_extended_properties(
            audit_kls, audit_table_name, from_class=domain_model)
    # containers
    from bungeni.capi import capi
    for name, target_type_key, rel_attr in descriptor_cls.info_containers:
        try:
            # NOTE(review): tti is only assigned to probe that the target type
            # is enabled (KeyError otherwise); the value itself is unused
            tti = capi.get_type_info(target_type_key)
        except KeyError:
            # target type not enabled
            log.warn("Ignoring %r container property %r to disabled type: %s.%s",
                type_key, name, target_type_key, rel_attr)
            continue
        container_qualname = "bungeni.models.domain.%s" % (
            naming.container_class_name(target_type_key))
        add_container_property_to_model(domain_model, name,
            container_qualname, rel_attr)
def create_scheduling_manager(domain_class, **params):
    """Instantiate a scheduling manager instance for `domain_class`.

    Creates a new SchedulingManager subclass, applies the feature params to
    it, and registers it as an ISchedulingManager adapter for the type's
    model interface. Returns the manager class name, or None if skipped.
    """
    manager_name = "%sSchedulingManager" % domain_class.__name__
    # membership test directly on the dict -- no need for .keys()
    if manager_name in globals():
        log.error("Scheduling manager named %s already exists", manager_name)
        return
    ti = capi.get_type_info(domain_class)
    domain_iface = ti.interface
    if domain_iface is None:
        log.error("No model interface for class %s", domain_class)
        # !+FIX(review) original message read "for for class"
        log.error("Skipping scheduling manager setup for class %s", domain_class)
        return
    globals()[manager_name] = type(manager_name, (SchedulingManager,), {})
    manager = globals()[manager_name]
    known_params = interfaces.ISchedulingManager.names()
    for config_name, config_value in params.iteritems():
        assert config_name in known_params, ("Check your scheduling "
            "feature configuration for %s. Only these parameters may be "
            "configured %s" % (domain_class.__name__, known_params))
        # coerce the config string to the type of the class-level default
        config_type = type(getattr(manager, config_name))
        if config_type in (tuple, list):
            config_value = map(str.strip, config_value.split())
        setattr(manager, config_name, config_type(config_value))
    gsm = getGlobalSiteManager()
    gsm.registerAdapter(manager, (domain_iface,), interfaces.ISchedulingManager)
    return manager_name
def register_custom_types():
    """Extend TYPE_REGISTRY with the declarations from bungeni_custom/types.xml.
    This is called prior to loading of the workflows for these custom types.
    Returns (type_key, TI) for the newly created TI instance.
    """
    xas, xab = misc.xml_attr_str, misc.xml_attr_bool
    # map a types.xml element tag to its system archetype key
    tag_archetype_key_mapping = {
        "doc": "doc",
        "event": "event",
        "group": "group",
        "member": "group_member",
    }
    def parse_elem(type_elem):
        # map a types.xml element to the positional args expected by
        # type_info.register_new_custom_type()
        type_key = xas(type_elem, "name")
        workflow_key = xas(type_elem, "workflow")
        descriptor_key = xas(type_elem, "descriptor")
        sys_archetype_key = tag_archetype_key_mapping[type_elem.tag]
        custom_archetype_key = xas(type_elem, "archetype")
        label = xas(type_elem, "label", None)
        container_label = xas(type_elem, "container_label", None)
        return (type_key, sys_archetype_key, custom_archetype_key,
            workflow_key, descriptor_key, label, container_label)
    def enabled_elems(elems):
        # generate only those elems that are enabled (default: enabled)
        for elem in elems:
            if xab(elem, "enabled", default=True):
                yield elem
    # load types.xml
    file_path = capi.get_path_for("types.xml")
    etypes = capi.schema.validate_file_rng("types", file_path)
    # register enabled types - ignoring not enabled types
    from bungeni.alchemist import type_info
    # custom "event" types (must be loaded prior to custom "doc" types)
    for etype in enabled_elems(etypes.iterchildren("event")):
        type_key, ti = type_info.register_new_custom_type(*parse_elem(etype))
    # custom "doc" types
    for etype in enabled_elems(etypes.iterchildren("doc")):
        type_key, ti = type_info.register_new_custom_type(*parse_elem(etype))
    # group/member types
    for egroup in enabled_elems(etypes.iterchildren("group")):
        group_type_key, ti = type_info.register_new_custom_type(*parse_elem(egroup))
        ti.domain_model.privilege_extent = xas(egroup, "privilege_extent", "group")
        for emember in enabled_elems(egroup.iterchildren("member")):
            type_key, ti = type_info.register_new_custom_type(*parse_elem(emember))
            ti.within_type_key = group_type_key
    # SYSTEM WIDE settings (set on class attributes on capi)
    capi.__class__.bicameral = xab(etypes, "bicameral")
    capi.__class__.country_code = xas(etypes, "country_code")
    capi.__class__.legislature_type_key = xas(etypes, "legislature_type")
    capi.__class__.chamber_type_key = xas(etypes, "chamber_type")
    # sanity checks
    # !+FIX(review) original assert message referenced an undefined name
    # `attr` (NameError on assertion failure); pair each type key with the
    # types.xml attribute it was read from.
    for attr, tk in (
            ("chamber_type", capi.chamber_type_key),
            ("legislature_type", capi.legislature_type_key),
        ):
        ti = capi.get_type_info(tk) # KeyError
        assert ti.sys_archetype_key == "group", \
            "Value %r specified for %r must be a %r" % (tk, attr, "group")
def update(self):
    """Extend self.query with docs the user has co-signed (in a "public"
    signatory state), and order the result by descending doc_id.
    """
    user_id = self.context.user_id
    parliament_id = self.context.group_id
    wf = capi.get_type_info("signatory").workflow
    session = Session()
    # ids of docs this user has signed, in publicly-visible signatory states
    public_state_ids = wf.get_state_ids(tagged=["public"])
    signatories = session.query(domain.Signatory).filter(
        sql.and_(
            domain.Signatory.user_id == user_id,
            domain.Signatory.status.in_(public_state_ids),
        )).all()
    signed_pi_ids = [sgn.head_id for sgn in signatories]
    if len(signed_pi_ids) > 0:
        # union in the cosigned items
        cosigned = session.query(domain.Doc).filter(
            sql.and_(
                domain.Doc.parliament_id == parliament_id,
                domain.Doc.status.in_(self.states),
                domain.Doc.doc_id.in_(signed_pi_ids),
            ))
        self.query = self.query.union(cosigned)
    self.query = self.query.order_by(domain.Doc.doc_id.desc())
def get_feature(discriminator, feature_name):
    """Get the named workflow feature instance (not necessarily enabled),
    or None.
    """
    # raises KeyError for an unknown discriminator
    workflow = capi.get_type_info(discriminator).workflow
    if workflow is not None:
        return workflow.get_feature(feature_name)
def __init__(self, context, request):
    """Resolve domain model, descriptor, table columns and the sort
    parameters (from the request, falling back to descriptor defaults).
    """
    super(ContainerJSONBrowserView, self).__init__(context, request)
    unproxied = proxy.removeSecurityProxy(self.context)
    self.domain_model = unproxied.domain_model
    ti = capi.get_type_info(self.domain_model)
    self.domain_annotation = ti.descriptor_model
    self.fields = tuple(container.getFields(
        self.context, ti.derived_table_schema, self.domain_annotation))
    # table keys
    self.table = orm.class_mapper(self.domain_model).mapped_table
    self.utk = dict(
        (column.key, column) for column in self.table.columns)
    # sort_on defaults: [str]
    self.defaults_sort_on = getattr(self.domain_annotation, "sort_on", None)
    # sort_on parameter name: str -- pick off request, if necessary seeding
    # it from the first name defined in defaults_sort_on
    if self.defaults_sort_on and not self.request.get("sort"):
        self.request.form["sort"] = u"sort_%s" % (self.defaults_sort_on[0])
    self.sort_on = request.get("sort")
    # sort_dir: "desc" | "asc" -- pick off request, if necessary seeding it
    # from the domain model default, else "desc"
    if not self.request.get("dir"):
        self.request.form["dir"] = unicode(
            getattr(self.domain_annotation, "sort_dir", "desc"))
    self.sort_dir = self.request.get("dir")
    self.sort_dir_func = dict(asc=sql.asc, desc=sql.desc).get(
        self.sort_dir, sql.desc)
def __init__(self, context, item_type, filter_states=None,
        group_filter=False, item_filters={}
    ):
    """Remember context/type and resolve filter states, group filter and
    the schedulable domain class for `item_type`.
    """
    self.context = context
    self.item_type = item_type
    ti = capi.get_type_info(item_type)
    # default to the states tagged as pending scheduling
    if filter_states:
        self.filter_states = filter_states
    else:
        self.filter_states = ti.workflow.get_state_ids(
            tagged=[TAG_SCHEDULE_PENDING])
    self.group_filter = (group_filter
        or not IParliament.providedBy(context.group)
        or IAgendaItem.implementedBy(ti.domain_model))
    try:
        self.domain_class = get_schedulable_types()[item_type].get(
            "domain_model")
    except KeyError:
        # !+try/except not necessary?
        try:
            self.domain_class = ti.domain_model
        except KeyError:
            raise KeyError(
                "Unable to locate domain class for type %s" % item_type)
    self.item_filters = item_filters
def createManagerFactory(domain_class, **params):
    """Create and register an ISignatoryManager adapter class for
    `domain_class`, configured from the signatory feature params.
    """
    manager_name = "%sSignatoryManager" % domain_class.__name__ #!+naming
    # membership test directly on the dict -- no need for .keys()
    if manager_name in globals():
        log.error("Signatory manager named %s already exists", manager_name)
        return
    ti = capi.get_type_info(domain_class)
    domain_iface = ti.interface
    if domain_iface is None:
        log.error("No model interface for class %s", domain_class)
        log.error("Not creating Signatory Manager for class %s", domain_class)
        return
    globals()[manager_name] = type(manager_name, (SignatoryValidator,), {})
    manager = globals()[manager_name]
    for config_name, config_value in params.iteritems():
        assert config_name in CONFIGURABLE_PARAMS, ("Check your signatory "
            "feature configuration for %s. Only these parameters may be "
            "configured %s" % (domain_class.__name__, CONFIGURABLE_PARAMS))
        # coerce the config string to the type of the class-level default
        config_type = type(getattr(manager, config_name))
        if config_type in (tuple, list):
            config_value = map(str.strip, config_value.split())
        setattr(manager, config_name, config_type(config_value))
    assert set.intersection(
        set(manager.submitted_states),
        set(manager.draft_states),
        set(manager.expire_states)) == set(
        ), "draft, submitted and expired states must be distinct lists"
    gsm = getGlobalSiteManager()
    gsm.registerAdapter(manager, (domain_iface,), interfaces.ISignatoryManager)
    # !+IFEATURE_SIGNATORY(mr, oct-2012) this should be included in signatory
    # feature setup and handling
    domain_class.allow_sign_document = _allow_sign_document
    domain_class.allow_withdraw_signature = _allow_withdraw_signature
def __init__(self, context, request):
    """Set up domain model/descriptor lookups plus sort parameters for the
    container JSON listing.
    """
    super(ContainerJSONBrowserView, self).__init__(context, request)
    self.domain_model = proxy.removeSecurityProxy(self.context).domain_model
    ti = capi.get_type_info(self.domain_model)
    derived_table_schema = ti.derived_table_schema
    self.domain_annotation = ti.descriptor_model
    self.fields = tuple(container.getFields(
        self.context, derived_table_schema, self.domain_annotation))
    # table keys
    mapped_table = orm.class_mapper(self.domain_model).mapped_table
    self.table = mapped_table
    self.utk = dict([(column.key, column) for column in mapped_table.columns])
    # sort_on defaults: [str]
    self.defaults_sort_on = getattr(self.domain_annotation, "sort_on", None)
    # seed the request "sort" parameter from the first default, if absent
    if not self.request.get("sort"):
        if self.defaults_sort_on:
            self.request.form["sort"] = u"sort_%s" % (self.defaults_sort_on[0])
    self.sort_on = request.get("sort")
    # sort_dir: "desc" | "asc" -- seed the request "dir" parameter from the
    # descriptor default (else "desc"), if absent
    if not self.request.get("dir"):
        self.request.form["dir"] = unicode(
            getattr(self.domain_annotation, "sort_dir", "desc"))
    self.sort_dir = self.request.get("dir")
    _sort_dir_funcs = dict(asc=sql.asc, desc=sql.desc)
    self.sort_dir_func = _sort_dir_funcs.get(self.sort_dir, sql.desc)
def reset_zope_schema_properties_on_model_interface(descriptor_cls):
    """Regenerate the zope.schema field properties on the model interface
    (derived_table_schema) of the type described by `descriptor_cls`, so that
    (localized) descriptor field customizations are reflected on the schema.
    """
    type_key = naming.type_key("descriptor_class_name", descriptor_cls.__name__)
    ti = capi.get_type_info(type_key)
    domain_model = ti.domain_model
    sast = alchemist.sa2zs.SQLAlchemySchemaTranslator()
    domain_table = alchemist.utils.get_local_table(domain_model)
    # zope.schema field property map
    zsfp_map = sast.generate_fields(domain_table, descriptor_cls)
    # apply manually overridden field properties
    sast.apply_properties(zsfp_map, descriptor_cls)
    # stuff back onto derived_table_schema -- both field name sets must agree
    derived_table_schema = ti.derived_table_schema
    assert set(derived_table_schema.names(all=False)) == set(zsfp_map), \
        "Incosistency in descriptor %r field lists:\n old:%s\n new:%s" % (
            type_key,
            sorted(set(derived_table_schema.names(all=False))),
            sorted(set(zsfp_map)))
    for name in derived_table_schema.names(all=False):
        # !+ zsfp == descriptor_cls.fields_by_name[name].property
        zsfp = zsfp_map[name]
        # !+property.__name__ needed downstream by
        # zope.formlib.form.FormField __init__() does assert name !?!
        zsfp.__name__ = name
        # !+ cannot simply set the property directly on derived_table_schema:
        #       derived_table_schema[f.name] = zsfp
        #    as this gives:
        #       *** TypeError: 'InterfaceClass' object does not support item assignment
        #    So we have to workaround it, poking the InterfaceClass-private
        #    attrs mapping directly !!
        derived_table_schema._InterfaceClass__attrs[name] = zsfp
    # and we need to notify (only once) that the schema has changed
    derived_table_schema.changed("localize_descriptors")
def __init__(self, context, item_type, filter_states=None,
        group_filter=False, item_filters={}):
    """Initialize the schedulable-items filter for `item_type` in `context`.
    """
    self.context = context
    self.item_type = item_type
    type_info = capi.get_type_info(item_type)
    # fall back to states tagged as pending scheduling
    self.filter_states = (filter_states if filter_states
        else type_info.workflow.get_state_ids(tagged=[TAG_SCHEDULE_PENDING]))
    not_parliament = not IParliament.providedBy(context.group)
    is_agenda_item = IAgendaItem.implementedBy(type_info.domain_model)
    self.group_filter = group_filter or not_parliament or is_agenda_item
    try:
        self.domain_class = get_schedulable_types()[item_type].get(
            "domain_model")
    except KeyError:
        # !+try/except not necessary?
        try:
            self.domain_class = type_info.domain_model
        except KeyError:
            raise KeyError(
                "Unable to locate domain class for type %s" % item_type)
    self.item_filters = item_filters
def getMenuItems(self, context, request):
    """Build menu entries for schedulable-content containers reachable from
    the parent of `context`, filtered by the Add permission.
    """
    unproxied = proxy.removeSecurityProxy(context.__parent__)
    try:
        items = unproxied.items()
    except AttributeError:
        items = []
    # also pick up containers exposed as (pluralized) attributes
    for key, info in capi.iter_type_info():
        if not IScheduleContent.implementedBy(info.domain_model):
            continue
        name = naming.plural(key)
        if hasattr(unproxied, name):
            items.append((name, getattr(unproxied, name)))
    results = []
    for key, item in items:
        if not IAlchemistContainer.providedBy(item):
            continue
        if not IScheduleContent.implementedBy(item.domain_model):
            continue
        type_info = capi.get_type_info(item.domain_model)
        permission = "bungeni.%s.Add" % (
            type_info.workflow_key or
            naming.type_key("model_name", item.domain_model.__name__))
        if not checkPermission(permission, context):
            continue
        dc_adapter = IDCDescriptiveProperties(item, None)
        if dc_adapter:
            _title = dc_adapter.title
        else:
            _title = getattr(item, "title", "Unknown")
        results.append(dict(
            title=_title,
            description=_title,
            action=url.absoluteURL(item, request),
            selected=False,
            icon=None,
            extra={},
            submenu=None,
        ))
    return results
def getSchedulingContexts(self, request):
    """Set up scheduling contexts.
    Currently we include:
    - committees
    - plenary
    """
    app = common.get_application()
    contexts = []
    # !+GENERALIZE_GROUP
    #!+HARDWIRING(mb, Aug-2012) unhardwire committees lookup
    in_scheduling_layer = \
        interfaces.IWorkspaceSchedulingSectionLayer.providedBy(request)
    if in_scheduling_layer:
        scheduling = app["workspace"]["scheduling"]
        for committee in scheduling["committees"].values():
            if not committee.active:
                continue
            wf = capi.get_type_info(committee).workflow
            if wf.has_feature("sitting"):
                contexts.append(schedule.GroupSchedulingContext(committee))
    for context in contexts:
        context.__name__ = u"schedule"
    if in_scheduling_layer:
        contexts.append(
            schedule.ISchedulingContext(app["workspace"]["scheduling"]))
    if len(contexts):
        # the last (plenary) context is the unnamed default
        contexts[-1].__name__ = u""
    return contexts
def createManagerFactory(domain_class, **params):
    """Create, configure and register a SignatoryValidator subclass as the
    ISignatoryManager adapter for `domain_class`.
    """
    manager_name = "%sSignatoryManager" % domain_class.__name__ #!+naming
    if manager_name in globals().keys():
        log.error("Signatory manager named %s already exists", manager_name)
        return
    ti = capi.get_type_info(domain_class)
    domain_iface = ti.interface
    if domain_iface is None:
        log.error("No model interface for class %s", domain_class)
        log.error("Not creating Signatory Manager for class %s", domain_class)
        return
    manager = type(manager_name, (SignatoryValidator,), {})
    globals()[manager_name] = manager
    for config_name, config_value in params.iteritems():
        assert config_name in CONFIGURABLE_PARAMS, ("Check your signatory "
            "feature configuration for %s. Only these parameters may be "
            "configured %s" % (domain_class.__name__, CONFIGURABLE_PARAMS))
        # coerce value to the type of the class-level default
        config_type = type(getattr(manager, config_name))
        if config_type in (tuple, list):
            config_value = map(str.strip, config_value.split())
        setattr(manager, config_name, config_type(config_value))
    overlap = set.intersection(
        set(manager.submitted_states),
        set(manager.draft_states),
        set(manager.expire_states))
    assert overlap == set(), \
        "draft, submitted and expired states must be distinct lists"
    gsm = getGlobalSiteManager()
    gsm.registerAdapter(manager, (domain_iface,), interfaces.ISignatoryManager)
    # !+IFEATURE_SIGNATORY(mr, oct-2012) this should be included in signatory
    # feature setup and handling
    domain_class.allow_sign_document = _allow_sign_document
    domain_class.allow_withdraw_signature = _allow_withdraw_signature
def get_status(self, item_type):
    """Return a mapping {state_id: translated state title} of the workflow
    states in which documents of `item_type` are visible in this workspace
    tab (self.context.__name__), across all the user's workspace roles.
    """
    # !+ why does item_type not use the standard type_key name as everywhere else?
    type_key = item_type
    # !+ what is this method supposed to do?
    # !+ why does something named "get_status" return a (translated) dict?
    translated = dict()
    if not type_key:
        # !+ why is item_type allowed to be / is the empty string?
        return translated
    # !+ why was domain_model being INSTANTIATED IN A LOOP to just get the
    # workflow for it ?!?
    ti = capi.get_type_info(type_key)
    workflow, domain_model = ti.workflow, ti.domain_model
    workspace_config = component.getUtility(IWorkspaceTabsUtility)
    roles = get_workspace_roles() + ROLES_DIRECTLY_DEFINED_ON_OBJECTS
    #domain_class = workspace_config.get_domain(item_type)
    # gather the distinct status ids visible for any of the roles in this tab
    results = set()
    for role in roles:
        status = workspace_config.get_status(
            role, domain_model, self.context.__name__)
        if status:
            for s in status:
                results.add(s)
    # translate each state title for the current request locale
    for result in results:
        #workflow = IWorkflow(domain_model())
        status_title = translate(
            workflow.get_state(result).title,
            domain="bungeni",
            context=self.request)
        translated[result] = status_title
    return translated
def getSchedulingContexts(self, request):
    """Set up scheduling contexts.
    Currently we include:
    - committees
    - plenary
    """
    app = common.get_application()
    committees = []
    contexts = []
    # !+GENERALIZE_GROUP
    #!+HARDWIRING(mb, Aug-2012) unhardwire committees lookup
    if interfaces.IWorkspaceSchedulingSectionLayer.providedBy(request):
        committees[:] = app["workspace"]["scheduling"]["committees"].values()
    # only active committees whose workflow enables the "sitting" feature
    active_committees = [c for c in committees if c.active]
    for committee in active_committees:
        if capi.get_type_info(committee).workflow.has_feature("sitting"):
            contexts.append(schedule.GroupSchedulingContext(committee))
    for ctx in contexts:
        ctx.__name__ = u"schedule"
    if interfaces.IWorkspaceSchedulingSectionLayer.providedBy(request):
        contexts.append(
            schedule.ISchedulingContext(app["workspace"]["scheduling"]))
    if contexts:
        # the last (plenary) context is the unnamed default
        contexts[-1].__name__ = u""
    return contexts
def provides_feature(discriminator, feature_name):
    """Does the domain model identified by discriminator provide the
    named feature?
    `discriminator` may be a domain.Entity subclass, or a type key.
    """
    # use isinstance (not `type(x) is type`) so classes whose metaclass is a
    # *subclass* of type (e.g. instrumented/interface-decorated classes) are
    # still recognized as classes
    if isinstance(discriminator, type) and issubclass(discriminator, domain.Entity):
        model = discriminator
    else:
        model = capi.get_type_info(discriminator).domain_model
    return get_feature_interface(feature_name).implementedBy(model)
def values(self):
    """List the context's items that are both viewable by the current user
    and in a publicly-tagged workflow state.
    """
    workflow = capi.get_type_info(self.context.domain_model).workflow
    public_wfstates = workflow.get_state_ids(tagged=["public"], restrict=False)
    visible = []
    for item in self.context.values():
        if not checkPermission(view_permission(item), item):
            continue
        if item.status in public_wfstates:
            visible.append(item)
    return visible
def get_search_types(types):
    """string of all searched types (for display)"""
    # a comma inside the first entry signals "all types" -- use the default
    if not types or "," in types[0]:
        return str_all_types
    type_names = [
        capi.get_type_info(typ).descriptor_model.container_name
        for typ in types]
    return ", ".join(type_names)
def is_type_workspaced(self, type_key):
    """Is this type workspaced within this group context (for user)?
    """
    from bungeni.capi import capi
    feature = capi.get_type_info(type_key).workflow.get_feature("workspace")
    if feature is None:
        return False
    # workspaced here iff this group is named in the feature's group_names
    return self.conceptual_name in feature.get_param("group_names")
def handle_add_and_add_another(self, action, data):
    """Create the object and, unless a next URL is already set, redirect
    back to an add form with a status message.
    """
    ob = self.createAndAdd(data)
    name = self.domain_model.__name__
    if self._next_url:
        return
    base_url = url.absoluteURL(self.context, self.request)
    if IWorkspaceMyDocumentsSectionLayer.providedBy(self.request):
        # in "my documents", the add form is type-specific
        item_type = capi.get_type_info(ob).workflow_key
        self._next_url = base_url + (
            "/add_%s?portal_status_message=%s Added" % (item_type, name))
    else:
        self._next_url = base_url + (
            "/add?portal_status_message=%s Added" % name)
def add_info_container_to_descriptor(model, container_attr_name,
        target_type_key, rel_attr_name, indirect_key=None
    ):
    """For containers that are defined as part of a feature, need an
    InfoContainer instance added to the descriptor (corresponding
    bungeni.alchemist.model.add_container_property_to_model() and creation
    of SubFormViewlet viewlet are done downstream).
    """
    from bungeni.ui.descriptor.localization import add_info_container
    type_info = capi.get_type_info(model)
    add_info_container(
        type_info.type_key,
        type_info.descriptor_model.info_containers,
        container_attr_name,
        target_type_key,
        rel_attr_name,
        indirect_key,
        viewlet=True,
        _origin="feature")
def is_type_workspaced(self, type_key):
    """Is this type workspaced for this !+workspace context (for user)?
    !+WORKSPACE_GROUP_CONTEXTS should be refined further to specific groups,
    not just be WorkspaceContainer-wide (for all groups)!
    """
    ti = capi.get_type_info(type_key)
    feature = ti.workflow.get_feature("workspace")
    if feature is None:
        return False
    group_names = feature.get_param("group_names")
    if not group_names:
        return False
    # workspaced iff any of the user's groups is named by the feature
    user = utils.get_login_user()
    return any(group.conceptual_name in group_names
        for group in utils.get_user_groups(user))
def get_search_doc_types(context):
    """get types searchable in a context

    In a workspace section only the types configured on the user's workspace
    tabs are searchable; elsewhere, all registered types are.
    """
    if IWorkspaceSection.providedBy(context):
        ws_config = zope.component.getUtility(IWorkspaceTabsUtility)
        roles = get_workspace_roles()
        wf_types = set()
        for role in roles:
            # dead `types = []` assignment inside this loop removed
            wf_types.update(*[
                wsp.keys() for wsp in ws_config.workspaces[role].values()])
        types = [capi.get_type_info(typ) for typ in wf_types]
    else:
        types = [info for key, info in capi.iter_type_info()]
    return types
def email_notifications_callback(channel, method, properties, body):
    """Queue consumer: send an email notification for the message, if the
    type's workflow enables the "email" feature; always ack the delivery.
    """
    message = simplejson.loads(body)
    workflow = capi.get_type_info(message["type"]).workflow
    if workflow and workflow.has_feature("email"):
        recipients = get_recipients(message.get("principal_ids", None))
        template = get_template(message["type"] + ".xml")
        subject, text = EmailBlock(template, message).get_email()
        # NOTE(review): "text" yields Content-Type text/text; the conventional
        # plain-text subtype is "plain" -- confirm downstream expectations
        subtype = "html" if is_html(template) else "text"
        msg = MIMEText(text, subtype)
        msg["Subject"] = subject
        component.getUtility(IBungeniMailer).send(msg, recipients)
    channel.basic_ack(delivery_tag=method.delivery_tag)
def get_base_audit_class(model):
    """Identify what should be the BASE audit class for a {model}Audit
    class to inherit from, and return it.
    """
    assert interfaces.IFeatureAudit.implementedBy(model), model
    ti = capi.get_type_info(model)
    if ti.archetype is None:
        # !+ should this be allowed to ever happen?
        # i.e. require each type to declare an archetype?
        base = domain.Audit
    else:
        base = get_audit_class_for(ti.archetype)
        if base is None:
            # fallback to get the audit class for the sys archetype
            base = get_audit_class_for(ti.sys_archetype)
    assert base is not None, (model, ti.archetype, base)
    return base
def _get_items(self):
    """Collect the sitting-schedule entries for this item, each with sitting
    name, venue title and discussion minutes; disables display if empty.
    """
    item_type = capi.get_type_info(self.context).workflow_key
    scheduled_entries = Session().query(domain.ItemSchedule).filter(
        sql.and_(
            domain.ItemSchedule.item_id == self.item_id,
            domain.ItemSchedule.item_type == item_type)).all()
    items = []
    for entry in scheduled_entries:
        sitting = entry.sitting
        if sitting.venue:
            venue_title = IDCDescriptiveProperties(sitting.venue).title
        else:
            venue_title = _(u"Unknown venue")
        items.append(dict(
            sitting_name=IDCDescriptiveProperties(sitting).title,
            sitting_venue=venue_title,
            minutes=[dict(text=minute.body)
                for minute in entry.itemdiscussions],
        ))
    if not items:
        self.for_display = False
    return items
def add_info_container_to_descriptor(model, container_attr_name,
        target_type_key, rel_attr_name, indirect_key=None):
    """For containers that are defined as part of a feature, need an
    InfoContainer instance added to the descriptor (corresponding
    bungeni.alchemist.model.add_container_property_to_model() and creation
    of SubFormViewlet viewlet are done downstream).
    """
    ti = capi.get_type_info(model)
    type_key = ti.type_key
    info_containers = ti.descriptor_model.info_containers
    from bungeni.ui.descriptor.localization import add_info_container
    add_info_container(type_key, info_containers, container_attr_name,
        target_type_key, rel_attr_name, indirect_key,
        viewlet=True, _origin="feature")
def __init__(self, context, type_key, filter_states=None,
        group_filter=True, item_filters={}
    ):
    """Remember context/type, resolve the schedulable states and domain
    class for `type_key`.
    """
    self.context = context
    self.item_type = type_key
    ti = capi.get_type_info(type_key)
    self.filter_states = get_schedulable_states(type_key)
    self.group_filter = group_filter
    schedulable_types = get_schedulable_types()
    try:
        self.domain_class = schedulable_types[type_key].get("domain_model")
    except KeyError:
        # !+try/except not necessary?
        try:
            self.domain_class = ti.domain_model
        except KeyError:
            raise KeyError(
                "Unable to locate domain class for type %s" % type_key)
    self.item_filters = item_filters
def list_container_items(container_instance, permission=None):
    """Generate list of container items with permission check
    Note that we first try to generate the permission name or fall
    back to zope.View
    """
    from bungeni.core.workflows.utils import view_permission
    trusted = proxy.removeSecurityProxy(container_instance)
    domain_model = trusted._class
    descriptor = capi.get_type_info(domain_model).descriptor_model
    # build the ORDER BY terms from the descriptor's sort configuration
    sort_fields = []
    if descriptor.sort_on:
        order_func = sort_dir_funcs.get(descriptor.sort_dir, sql.desc)
        sort_fields = [order_func(getattr(domain_model, field))
            for field in descriptor.sort_on]
    if not permission:
        permission = view_permission(domain_model)
    for item in trusted.batch(limit=None, order_by=tuple(sort_fields)):
        if checkPermission(permission, item):
            yield item
def getMenuItems(self, context, request):
    """Build calendar-content menu entries: traverse to each schedulable
    container from the parent of `context`, filtered by the Add permission.
    """
    unproxied = proxy.removeSecurityProxy(context.__parent__)
    items = []
    for key, info in capi.iter_type_info():
        if not IScheduleContent.implementedBy(info.domain_model):
            continue
        name = naming.plural(key)
        traverser = component.getMultiAdapter(
            (unproxied, request), IPublishTraverse)
        try:
            items.append((name, traverser.publishTraverse(request, name)))
        except NotFound:
            continue
    results = []
    for key, item in items:
        if not IAlchemistContainer.providedBy(item):
            continue
        if not IScheduleContent.implementedBy(item.domain_model):
            continue
        type_info = capi.get_type_info(item.domain_model)
        permission = "bungeni.%s.Add" % (
            type_info.workflow_key or
            naming.type_key("model_name", item.domain_model.__name__))
        if not checkPermission(permission, context):
            continue
        dc_adapter = IDCDescriptiveProperties(item, None)
        if dc_adapter:
            _title = dc_adapter.title
        else:
            _title = getattr(item, "title", "Unknown")
        results.append(dict(
            title=_title,
            description=_title,
            action=url.absoluteURL(item, request),
            selected=False,
            icon=None,
            extra={"id": "nav_calendar_content_%s" % key},
            submenu=None,
        ))
    return results
def _get_items(self):
    """Return the schedule entries (sitting name, venue, minutes) for this
    item; sets self.for_display = False when there are none.
    """
    item_type = capi.get_type_info(self.context).workflow_key
    query = Session().query(domain.ItemSchedule).filter(sql.and_(
        domain.ItemSchedule.item_id == self.item_id,
        domain.ItemSchedule.item_type == item_type))
    items = []
    for scheduled in query.all():
        venue = scheduled.sitting.venue
        items.append(dict(
            sitting_name=IDCDescriptiveProperties(scheduled.sitting).title,
            sitting_venue=(IDCDescriptiveProperties(venue).title
                if venue else _(u"Unknown venue")),
            minutes=[dict(text=minute.body)
                for minute in scheduled.itemdiscussions],
        ))
    if not items:
        self.for_display = False
    return items