class Activity(BehavioralFlowNode, EventHandler):

    def __init__(self, definition):
        super(Activity, self).__init__(definition)
        self.assigned_to = PersistentList()

    def assigne_to(self, users):
        if not isinstance(users, (list, tuple)):
            users = [users]
        users = [u for u in users if u not in self.assigned_to]
        self.assigned_to.extend(users)

    def unassigne(self, users):
        if not isinstance(users, (list, tuple)):
            users = [users]
        users = [u for u in users if u in self.assigned_to]
        for user in users:
            self.assigned_to.remove(user)

    def set_assignment(self, users=None):
        self.assigned_to = PersistentList()
        if users is not None:
            self.assigne_to(users)
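# A minimal, framework-free sketch of the deduplicating assignment pattern the
# Activity class above implements; it assumes only the `persistent` package
# and uses illustrative user names, not original code.
from persistent.list import PersistentList

assigned_to = PersistentList()

def assign(users):
    if not isinstance(users, (list, tuple)):
        users = [users]
    # only extend with users that are not already assigned
    assigned_to.extend([u for u in users if u not in assigned_to])

assign('alice')
assign(['alice', 'bob'])                 # 'alice' is not appended twice
assert list(assigned_to) == ['alice', 'bob']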
class GalleryAlbum(GalleryContainer):

    def __init__(self, name, description=None, long_description=None,
                 location=None, date_from=None, date_to=None, parent=None):
        GalleryContainer.__init__(self, name, description, parent=parent)
        self.long_description = long_description
        self.location = location
        if date_from is None:
            # Evaluate "now" per instance; the original default
            # (date_from=datetime.datetime.now()) was evaluated only once,
            # at import time.
            date_from = datetime.datetime.now()
        self.date_from = date_from
        self.date_to = date_to
        self.__pictures = PersistentList()

    @property
    def pictures(self):
        return self.__pictures

    def add(self, item):
        self[item.__name__] = item

    def insert(self, index, item):
        GalleryContainer.add(self, item)
        self.__pictures.insert(index, item)

    def __setitem__(self, name, item):
        GalleryContainer.__setitem__(self, name, item)
        self.__pictures.append(item)

    def __delitem__(self, name):
        self.__pictures.remove(self[name])
        GalleryContainer.__delitem__(self, name)
class SponsoredQueue(QonPersistent):
    """A queue to keep Sponsored items that are in pending state.

    Code refactored out of Voteable.
    """

    def __init__(self):
        self.queue = PersistentList()

    def add_to_queue(self, item):
        self.queue.append(item)

    def remove_from_queue(self, item):
        if item in self.queue:
            self.queue.remove(item)

    def add_sponsor(self, item, sponsor):
        """Add sponsor to item."""
        item.add_sponsor(sponsor)
        if item.enough_sponsors():
            self.force_accept(item)

    def get_items_by_state(self, state):
        return [item for item in self.queue if item.get_state() == state]

    def new_items(self):
        return self.get_items_by_state('new')

    def pending_items(self):
        return self.get_items_by_state('pending')

    def force_accept(self, item):
        item.force_accept()
        self.remove_from_queue(item)
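# Hypothetical usage sketch for SponsoredQueue (assumes the qon base class
# QonPersistent is importable); DummyItem is an illustrative stand-in for a
# real sponsorable item, not part of the original code.
class DummyItem(object):
    def __init__(self, state):
        self._state = state

    def get_state(self):
        return self._state

queue = SponsoredQueue()
queue.add_to_queue(DummyItem('new'))
queue.add_to_queue(DummyItem('pending'))
assert len(queue.new_items()) == 1
assert len(queue.pending_items()) == 1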
class OrderedPersistentDict(DictMixin, Persistent):

    def __init__(self, data=None):
        self._data = PersistentMapping()
        self._keylist = PersistentList()
        if data is not None:
            self.update(data)

    def __setitem__(self, key, val):
        self._data[key] = val
        if key in self._keylist:
            self._keylist.remove(key)
        self._keylist.append(key)

    def __getitem__(self, key):
        return self._data[key]

    def __delitem__(self, key):
        self._keylist.remove(key)
        del self._data[key]

    def keys(self):
        return self._keylist[:]

    def reverse(self):
        items = list(self.items())
        items.reverse()
        return items
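# Hypothetical usage sketch (assumes the Python 2 / ZODB environment the
# original targets, i.e. DictMixin, Persistent, PersistentMapping and
# PersistentList are importable): re-setting an existing key moves it to the
# end of the iteration order, as implemented in __setitem__ above.
d = OrderedPersistentDict()
d['a'] = 1
d['b'] = 2
d['a'] = 3                               # 'a' is re-appended, so it now comes last
assert list(d.keys()) == ['b', 'a']
assert d.reverse() == [('a', 3), ('b', 2)]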
class CustomizationMarker(PersistentInterfaceClass, ZMIObject):
    meta_type = "Silva Customization Marker"

    silvaconf.icon("markers.png")
    silvaconf.factory("manage_addCustomizationMarkerForm")
    silvaconf.factory("manage_addCustomizationMarker")

    def __init__(self, name, doc=None):
        self.id = name
        PersistentInterfaceClass.__init__(
            self, name=name, bases=(ICustomizableMarker,), __doc__=doc)
        ZMIObject.__init__(self)
        self.marked = PersistentList()

    def updateIdentifier(self):
        self.__identifier__ = self.markerId()

    def addMarkedObject(self, obj):
        oid = getUtility(IIntIds).register(obj)
        self.marked.append(oid)

    def removeMarkedObject(self, obj):
        oid = getUtility(IIntIds).getId(obj)
        self.marked.remove(oid)

    def markedObjects(self):
        resolver = getUtility(IIntIds).getObject
        return [resolver(oid) for oid in self.marked]

    def markerId(self):
        return u"marker:%s" % ".".join(self.getPhysicalPath()[1:])
class ViewletsForType(Persistent):

    def __init__(self):
        self.viewlets = PersistentList()
        self.default = ''
        self.use_default_viewlets = True
        self.use_default_default = True

    def append(self, item):
        if item not in self.viewlets:
            self.viewlets.append(item)
            self.use_default_viewlets = False

    def remove(self, item, force=False):
        if item == self.default:
            if force:
                self.setDefault(None)
            else:
                msg = ("Viewlet %s cannot be unregistered :"
                       " it is set as default." % item)
                raise CompositePackError(msg)
        if item in self.viewlets:
            self.viewlets.remove(item)
        if not self.viewlets:
            self.use_default_viewlets = True

    def getList(self):
        return self.viewlets

    def clearList(self):
        while self.viewlets:
            self.viewlets.pop()
        self.use_default_viewlets = True

    def setDefault(self, item):
        if self.use_default_viewlets or item in self.viewlets or item is None:
            self.default = item
            self.use_default_default = False
        else:
            msg = ("Viewlet %s cannot be set as default :"
                   " it is not registered." % item)
            raise CompositePackError(msg)

    def clearDefault(self):
        self.default = ''
        self.use_default_default = True

    def getDefault(self):
        return self.default

    def queryDefault(self, default=None):
        if self.default:
            return self.default
        else:
            return default
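# Hypothetical usage sketch for ViewletsForType (assumes Persistent,
# PersistentList and CompositePackError are importable from the surrounding
# CompositePack code): the registered default cannot be removed unless
# force=True, in which case the default is cleared first.
v = ViewletsForType()
v.append('viewlet_a')
v.setDefault('viewlet_a')
try:
    v.remove('viewlet_a')                # refused: it is the default
except CompositePackError:
    pass
v.remove('viewlet_a', force=True)        # default is cleared, then removed
assert v.getList() == [] and v.getDefault() is None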
class OrderedContainer(Container):
    """A Grok container that remembers the order of its items.

    This straightforward extension of the basic `grok.Container`
    remembers the order in which items have been inserted, so that
    `keys()`, `values()`, `items()`, and iteration across the container
    can all return the items in the order they were inserted.  The only
    way of changing the order is to call the `updateOrder()` method.
    """
    implements(interfaces.IOrderedContainer)

    def __init__(self):
        super(OrderedContainer, self).__init__()
        self._order = PersistentList()

    def keys(self):
        # Return a copy of the list to prevent accidental modifications.
        return self._order[:]

    def __iter__(self):
        return iter(self.keys())

    def values(self):
        return (self[key] for key in self._order)

    def items(self):
        return ((key, self[key]) for key in self._order)

    def __setitem__(self, key, object):
        existed = key in self
        # Then do whatever containers normally do.
        super(OrderedContainer, self).__setitem__(key, object)
        if not existed:
            self._order.append(key)

    def __delitem__(self, key):
        # First do whatever containers normally do.
        super(OrderedContainer, self).__delitem__(key)
        self._order.remove(key)

    def updateOrder(self, order):
        """Impose a new order on the items in this container.

        Items in this container are, by default, returned in the order
        in which they were inserted.  To change the order, provide an
        argument to this method that is a sequence containing every key
        already in the container, but in a new order.
        """
        if set(order) != set(self._order):
            raise ValueError("Incompatible key set.")
        self._order = PersistentList()
        self._order.extend(order)
        notifyContainerModified(self)
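# Hypothetical usage sketch, a rough sketch only: it assumes a configured
# grok/ZODB environment and uses illustrative keys and values.
from persistent import Persistent

container = OrderedContainer()
container['x'] = Persistent()
container['y'] = Persistent()
assert list(container.keys()) == ['x', 'y']

container.updateOrder(['y', 'x'])        # reorder in place
assert list(container.keys()) == ['y', 'x']

# updateOrder() must be given exactly the existing key set:
# container.updateOrder(['y']) would raise ValueError("Incompatible key set.")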
def __call__(self):
    req = self.request
    settings = Settings(self.context)

    annotations = settings.annotations
    if annotations is None:
        annotations = PersistentDict()
        settings.annotations = annotations

    sections = settings.sections
    if sections is None:
        sections = PersistentList()
        settings.sections = sections

    action = req.form['action']
    if action == 'addannotation':
        page = int(req.form['page'])
        if page not in annotations:
            annotations[page] = PersistentList()
        pageann = annotations[page]
        data = {
            "id": random.randint(1, 9999999),
            "coord": req.form['coord'],
            "title": req.form.get('title', ''),
            "content": req.form.get('content', ''),
        }
        pageann.append(data)
        return json.dumps(data)
    elif action == 'removeannotation':
        page = int(req.form['page'])
        if page in annotations:
            ann_id = int(req.form['id'])
            found = False
            annotations = annotations[page]
            for ann in annotations:
                if ann['id'] == ann_id:
                    found = ann
                    break
            if found:
                annotations.remove(found)
    elif action == 'addsection':
        data = {
            'page': req.form['page'],
            'title': req.form['title'],
        }
        sections.append(data)
        return json.dumps(data)
    elif action == 'removesection':
        data = {
            'page': req.form['page'],
            'title': req.form['title'],
        }
        if data in sections:
            sections.remove(data)
class HasMessages:
    """Mixin class providing message/inbox management."""

    __persistenceVersion = 1

    _purge_age = timedelta(days=7)    # days messages stay in trash
    _trash_age = timedelta(days=30)   # days messages stay in inbox

    def __init__(self):
        self.__message_list = PersistentList()

    def upgradeToVersion1(self):
        pl = PersistentList(self.__message_list)
        self.__message_list = pl

    def has_messages(self):
        return len(self.__message_list) > 0

    def new_messages(self):
        return [msg for msg in self.__message_list
                if msg.status == 'new']

    def old_messages(self):
        return [msg for msg in self.__message_list
                if msg.status == 'read']

    def deleted_messages(self):
        return [msg for msg in self.__message_list
                if msg.status == 'deleted']

    def add_message(self, msg):
        self.__message_list.append(msg)

    def trash_old_messages(self):
        """Call periodically to move old messages, read and unread, to trash."""
        cutoff = datetime.utcnow() - self._trash_age
        for msg in self.__message_list:
            if msg.status != 'deleted':
                if msg.date < cutoff:
                    msg.delete()

    def purge_old_messages(self):
        """Call periodically to permanently delete old messages out of trash."""
        cutoff = datetime.utcnow() - self._trash_age - self._purge_age
        for msg in self.deleted_messages():
            if (msg.date_opened or msg.date) < cutoff:
                self.__message_list.remove(msg)

    def message_index(self, msg):
        return self.__message_list.index(msg)

    def get_message(self, index):
        return self.__message_list[index]
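# Illustrative timeline, not original code: with the defaults above a message
# older than 30 days is moved to trash, and a trashed message is purged once
# it is older than 30 + 7 = 37 days.
from datetime import datetime, timedelta

now = datetime.utcnow()
trash_cutoff = now - timedelta(days=30)
purge_cutoff = now - timedelta(days=30) - timedelta(days=7)
assert purge_cutoff == now - timedelta(days=37)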
class Notes(Persistent):
    """A list of Note objects.

    Notes are just a container for Note objects

    >>> notes = Notes()

    Add a few notes

    >>> note1 = Note(title="note1")
    >>> note2 = Note(title="note2")
    >>> notes.add(note1)
    >>> notes.add(note2)

    Iterate over the notes

    >>> [n.title for n in notes]
    ['note1', 'note2']

    Remove a note

    >>> notes.remove(note1.unique_id)
    >>> [n.title for n in notes]
    ['note2']

    Remove all the notes

    >>> notes.clear()
    >>> [n for n in notes]
    []
    """
    implements(interfaces.INotes)

    def __init__(self):
        self._notes = PersistentList()

    def __iter__(self):
        return iter(self._notes)

    def add(self, note):
        self._notes.append(note)

    def remove(self, unique_id):
        for note in self._notes:
            if note.unique_id == unique_id:
                self._notes.remove(note)

    def clear(self):
        del self._notes[:]
class OrderedBTreeContainer(BTreeContainer):

    def __init__(self):
        super(OrderedBTreeContainer, self).__init__()
        self._order = PersistentList()

    def keys(self):
        return self._order[:]

    def values(self):
        return (self[key] for key in self._order)

    def items(self):
        return ((key, self[key]) for key in self._order)

    def __setitem__(self, key, object):
        exists = key in self
        if not exists:
            self._order.append(key)
        try:
            super(OrderedBTreeContainer, self).__setitem__(key, object)
        except Exception as e:
            # Keep _order consistent with the BTree if the insert fails.
            if not exists:
                self._order.remove(key)
            raise e
        return key

    def __delitem__(self, key):
        super(OrderedBTreeContainer, self).__delitem__(key)
        self._order.remove(key)

    def updateOrder(self, order):
        """Impose a new order on the items in this container.

        Items in this container are, by default, returned in the order
        in which they were inserted.  To change the order, provide an
        argument to this method that is a sequence containing every key
        already in the container, but in a new order.
        """
        if set(order) != set(self._order):
            raise ValueError("Incompatible key set.")
        self._order = PersistentList()
        self._order.extend(order)
        notifyContainerModified(self)
def setter(self, val, var=varname):
    sl = getattr(self, var, None)
    if sl is None:
        sl = PersistentList()
        setattr(self, var, sl)
    if isinstance(val, (list, PersistentList)):
        sl[:] = []
        for v in val:
            sl.append(v)
    elif not val.startswith('-DEL-'):
        if val not in sl:
            sl.append(val)
    else:
        val = val[5:]
        if val in sl:
            sl.remove(val)
    commit()
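# A minimal, hedged sketch of how a setter like the one above is typically
# wired into a property over a PersistentList attribute.  The factory name
# `list_property` is illustrative and `transaction.commit()` stands in for the
# original's `commit()` helper; neither is part of the original code.
from persistent.list import PersistentList
import transaction

def list_property(varname):
    def getter(self):
        return getattr(self, varname, None)

    def setter(self, val, var=varname):
        sl = getattr(self, var, None)
        if sl is None:
            sl = PersistentList()
            setattr(self, var, sl)
        if isinstance(val, (list, PersistentList)):
            sl[:] = list(val)              # assigning a list replaces the contents
        elif not val.startswith('-DEL-'):
            if val not in sl:
                sl.append(val)             # assigning a value appends it once
        else:
            val = val[5:]
            if val in sl:
                sl.remove(val)             # '-DEL-<value>' removes <value>
        transaction.commit()

    return property(getter, setter)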
class Issue(BTreeContainer):
    implements(IIssue)

    title = FieldProperty(IIssue["title"])
    description = FieldProperty(IIssue["description"])
    milestone = None

    def __init__(self):
        super(Issue, self).__init__()
        self._topics = PersistentList()

    def update_topics(self, titles):
        dels = []
        for ob in self._topics:
            if ob.title not in titles:
                dels.append(ob)
        for ob in dels:
            self._topics.remove(ob)
            ob.del_doc(self)
            if ob.is_empty():
                topic_key = ob.__name__
                project = get_project()
                try:
                    del project.topics[topic_key]
                except KeyError:
                    pass
        topic_folder = get_project().topics
        topics_keys = [x.__name__ for x in self._topics]
        for title in titles:
            if valid_title_p(title):
                ob = getObject(topic_folder.search(title))
                if ob.__name__ not in topics_keys:
                    self._topics.append(ob)
                    ob.add_doc(self)

    def get_topics(self):
        return self._topics
class LayoutsForType(Persistent):

    def __init__(self):
        self.viewlets = PersistentList()
        self.default = ''

    def append(self, item):
        if item not in self.viewlets:
            self.viewlets.append(item)

    def remove(self, item, force=False):
        if item == self.getDefault():
            if force:
                self.setDefault(None)
            else:
                msg = ("Layout %s cannot be unregistered :"
                       " it is set as default." % item)
                raise CompositePackError(msg)
        if item in self.viewlets:
            self.viewlets.remove(item)

    def getList(self):
        return self.viewlets

    def setDefault(self, item):
        if item in self.viewlets or item is None:
            self.default = item
        else:
            msg = ("Layout %s cannot be set as default :"
                   " it is not registered." % item)
            raise CompositePackError(msg)

    def getDefault(self):
        return self.default
class Group:

    def __init__(self, title):
        self.title = title
        self.links = PersistentList()

    def add_link(self, link):
        self.links.append(link)

    def remove_link(self, link):
        if link in self.links:
            self.links.remove(link)

    def reorder_links(self, order):
        # don't just do a blind re-assignment, reorder them with smarts :)
        neworder = []
        oldorder = self.links[:]
        for link in order:
            if link in oldorder:
                neworder.append(link)
                oldorder.remove(link)
        # add any leftover at the end
        neworder.extend(list(oldorder))
        self.links = PersistentList(neworder)
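# Hypothetical usage sketch (assumes PersistentList is imported as in the
# original module): links missing from `order` keep their relative position
# at the end, and names in `order` that are not links are ignored.
g = Group('sidebar')
for link in ('home', 'docs', 'blog'):
    g.add_link(link)

g.reorder_links(['blog', 'home', 'unknown'])
assert list(g.links) == ['blog', 'home', 'docs']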
class ResponseContainer(object):

    ANNO_KEY = 'poi.responses'

    def __init__(self, context):
        self.context = context
        annotations = unprotected_write(IAnnotations(self.context))
        self.__mapping = annotations.get(self.ANNO_KEY, None)
        if self.__mapping is None:
            self.__mapping = PersistentList()
            annotations[self.ANNO_KEY] = self.__mapping

    def __contains__(self, key):
        '''See interface IReadContainer

        Taken from zope.app.container.btree.

        Reimplement this method, since has_key() returns the key if
        available, while we expect True or False.

        >>> c = ResponseContainer()
        >>> "a" in c
        False
        >>> c["a"] = 1
        >>> "a" in c
        True
        >>> "A" in c
        False
        '''
        return key in self.__mapping

    has_key = __contains__

    def __getitem__(self, i):
        i = int(i)
        return self.__mapping.__getitem__(i)

    def __delitem__(self, item):
        self.__mapping.__delitem__(item)

    def __len__(self):
        return self.__mapping.__len__()

    def __setitem__(self, i, y):
        self.__mapping.__setitem__(i, y)

    def append(self, item):
        self.__mapping.append(item)

    def remove(self, item):
        self.__mapping.remove(item)

    def add(self, item):
        if not IResponse.providedBy(item):
            raise UnaddableError(self, item,
                                 "IResponse interface not provided.")
        self.append(item)
        id = str(len(self))
        event = ObjectAddedEvent(item, newParent=self.context, newName=id)
        notify(event)

    def delete(self, id):
        # We need to fire an ObjectRemovedEvent ourselves here because
        # self[id].__parent__ is not exactly the same as self, which
        # in the end means that __delitem__ does not fire an
        # ObjectRemovedEvent for us.
        #
        # Also, now we can say the oldParent is the issue instead of
        # this adapter.
        event = ObjectRemovedEvent(self[id], oldParent=self.context,
                                   oldName=id)
        self.remove(self[id])
        notify(event)
class TicketTracker(QonPersistent, Watchable):
    """A ticket/issue tracking system."""

    persistenceVersion = 2

    def __init__(self, name='', group=None):
        Watchable.__init__(self)
        self.group = group
        self.name = name
        self.modified = never
        self.__items = PersistentList()
        self.__categories = PersistentList()

    def upgradeToVersion1(self):
        self.name = iso_8859_to_utf_8(self.name)

    def add_category(self, category):
        if category not in self.__categories:
            self.__categories.append(category)

    def remove_category(self, category):
        if category in self.__categories:
            self.__categories.remove(category)

    def get_categories(self, sorted=0):
        cats = self.__categories[:]
        if sorted:
            cats.sort()
        return cats

    def add_ticket(self, ticket):
        self.__items.append(ticket)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def new_ticket(self, user, title='', category="feature", priority=3, text=''):
        """Create a new ticket and return it."""
        ticket = Ticket(user, title, category, priority, text)
        self.add_ticket(ticket)
        user.karma_activity_credit()
        return ticket

    def get_ticket(self, id):
        """Return a ticket with the given id (index)."""
        return self.__items[id]

    def get_index(self, ticket):
        """Return index of ticket. Uses cached value if available."""
        if hasattr(ticket, '_v_index'):
            return ticket._v_index
        else:
            return self.__items.index(ticket)

    def new_tickets(self):
        """Return new tickets."""
        return self._tickets_by_state(['new'])

    def open_tickets(self):
        """Return open tickets."""
        return self._tickets_by_state(['open'])

    def closed_tickets(self):
        """Return closed tickets."""
        return self._tickets_by_state(['closed'])

    def active_tickets(self):
        """Return tickets not new or closed."""
        return self._tickets_by_state(['open', 'assigned', 'feedback'])

    def owned_tickets(self, user, only_open=0):
        """Return tickets owned (submitted) by user."""
        items = []
        for i, t in enumerate(self.__items):
            if t.user is user and (not only_open or not t.is_closed()):
                t._v_index = i
                items.append(t)
        return items

    def assigned_tickets(self, user, only_open=0):
        """Return tickets assigned to user, regardless of state."""
        items = []
        for i, t in enumerate(self.__items):
            if t.assignee is user and (not only_open or not t.is_closed()):
                t._v_index = i
                items.append(t)
        return items

    def feedback_tickets(self, user, only_open=0):
        """Return tickets awaiting feedback from user."""
        items = []
        for i, t in enumerate(self.__items):
            if (t.is_feedback() and (t.user is user)
                    and (not only_open or not t.is_closed())):
                t._v_index = i
                items.append(t)
        return items

    def only_open(self, tickets):
        """Given a list of tickets, eliminate closed tickets."""
        return [t for t in tickets if not t.is_closed()]

    def sort_by_modified(self, tickets):
        """Given a list of tickets, return sorted newest to oldest by modified."""
        bydate = [(t.modified or t.date, t) for t in tickets]
        bydate.sort()
        bydate.reverse()
        return [t for date, t in bydate]

    def last_modified(self):
        """Compute and cache last_modified from tickets."""
        if self.modified:
            return self.modified
        latest = never
        for t in self.__items:
            if t.modified > latest:
                latest = t.modified
        self.modified = latest
        return self.modified

    def watchable_name(self):
        return self.name

    def watchable_modified_date(self):
        return self.last_modified()

    # ticket methods

    def add_comment(self, ticket, user, category, priority, text):
        ticket.add_comment(user, category, priority, text)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def change_status(self, ticket, user, status, category, priority, text):
        ticket.change_status(user, status, category, priority, text)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def _tickets_by_state(self, state):
        items = []
        for i, t in enumerate(self.__items):
            if t.status in state:
                t._v_index = i
                items.append(t)
        return items
class ExecutionContext(Object): createds = SharedMultipleProperty('createds', 'creator', False) involveds = SharedMultipleProperty('involveds', 'involvers', False) process = SharedUniqueProperty('process', 'execution_context', True) def __init__(self): super(ExecutionContext, self).__init__() self.parent = None self.sub_execution_contexts = PersistentList() self.properties_names = PersistentList() def _reindex(self): if self.process is not None: self.process.reindex() def root_execution_context(self): """Return the root execution context""" if self.parent is None: return self else: return self.parent.root_execution_context() def add_sub_execution_context(self, ec): """Add a sub execution context. A sub-execution context is associated to a sub-process""" if not (ec in self.sub_execution_contexts): self.sub_execution_contexts.append(ec) ec.parent = self def remove_sub_execution_context(self, ec): if ec in self.sub_execution_contexts: self.sub_execution_contexts.remove(ec) ec.parent = None def _sub_involveds(self): result = list(self.involveds) for sec in self.sub_execution_contexts: result.extend(sec._sub_involveds()) return set(result) def all_involveds(self): """Return all involved entities. The search includes sub-execution contexts""" root = self.root_execution_context() return root._sub_involveds() def _sub_createds(self): result = list(self.createds) for sec in self.sub_execution_contexts: result.extend(sec._sub_createds()) return set(result) def all_createds(self): """Return all created entities. The search includes sub-execution contexts""" root = self.root_execution_context() return root._sub_createds() @property def active_involveds(self): """Return all current relations of type 'involved'""" result = {} properties = dict(self.properties_names) for name in properties.keys(): relation_result = self.get_involved_collection(name) if relation_result: index_key = name + '_index' i = self.get_localdata(index_key) result[name] = { 'name': name, 'type': 'collection', 'assocition_kind': properties[name], 'index': i, 'is_current': True, 'entities': relation_result } continue relation_result = self.involved_entity(name) i = 0 if relation_result is not None: i = len(self.getproperty(name)) relation_result = [relation_result] else: continue result[name] = { 'name': name, 'type': 'element', 'assocition_kind': properties[name], 'index': i, 'is_current': True, 'entities': relation_result } return result def _sub_active_involveds(self): result = dict(self.active_involveds) for sec in self.sub_execution_contexts: sub_active = sec._sub_active_involveds() for key, value in sub_active.items(): if key in result: result[key]['entities'].extend(value['entities']) else: result[key] = value return result def all_active_involveds(self): """Return all current relations of type 'involved'. 
The search includes sub-execution contexts""" root = self.root_execution_context() return root._sub_active_involveds() @property def classified_involveds(self): """Return all archived relations of type 'involved'""" result = {} properties = dict(self.properties_names) for name in properties.keys(): index_key = name + '_index' if hasattr(self, index_key): index = self.get_localdata(index_key) + 1 for i in range(index)[1:]: prop_name = name + '_' + str(i) self._init_property(prop_name, self.dynamic_properties_def[prop_name]) result[prop_name] = { 'name': name, 'type': 'collection', 'assocition_kind': properties[name], 'index': i, 'is_current': (i == (index - 1)), 'entities': self.getproperty(prop_name) } else: result[name] = { 'name': name, 'type': 'element', 'assocition_kind': properties[name], 'index': -1, 'is_current': None, 'entities': self.involved_entities(name) } return result def _sub_classified_involveds(self): result = dict(self.classified_involveds) for sec in self.sub_execution_contexts: sub_classified = sec._sub_classified_involveds() for key, value in sub_classified.items(): if key in result: result[key]['entities'].extend(value['entities']) else: result[key] = value return result def all_classified_involveds(self): """Return all archived relations of type 'involved'. The search includes sub-execution contexts""" root = self.root_execution_context() return root._sub_classified_involveds() #entity def add_involved_entity(self, name, value, type='involved'): self.addtoproperty('involveds', value) if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) self.addtoproperty(name, value) else: self.properties_names.append((name, type)) self.dynamic_properties_def[name] = (SHARED_MULTIPLE, 'involvers', True) self._init_property(name, self.dynamic_properties_def[name]) self.addtoproperty(name, value) self._reindex() def remove_entity(self, name, value): if value in self.involveds: self.delfromproperty('involveds', value) if value in self.createds: self.delfromproperty('createds', value) if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) self.delfromproperty(name, value) self._reindex() def add_created_entity(self, name, value): self.addtoproperty('createds', value) self.add_involved_entity(name, value, 'created') # involved_entity start def involved_entity(self, name, index=-1): result = self.get_involved_entity(name, index) if result is not None: return result result = self.find_involved_entity(name, index) if result: return result[0] def get_involved_entity(self, name, index=-1): if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) result = self.getproperty(name) if result and index < len(result): return result[index] collection = self.get_involved_collection(name, index) if collection: return collection[0] return None def find_subinvolved_entity(self, name, index=-1): result = self.get_involved_entity(name, index) if result is not None: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subinvolved_entity(name, index)) return result def find_involved_entity(self, name, index=-1): root = self.root_execution_context() return root.find_subinvolved_entity(name, index) # involved_entity end # involved_entities start def involved_entities(self, name=None): result = self.get_involved_entities(name) if result: return result result = self.find_involved_entities(name) return result def 
get_involved_entities(self, name=None): if name is None: return list(self.involveds) if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) result = self.getproperty(name) return result result = [] collections = self.get_involved_collections(name) for collection in collections: result.extend(collection) return result def find_subinvolved_entities(self, name=None): result = self.get_involved_entities(name) if result: return result else: for sec in self.sub_execution_contexts: result.extend(sec.find_subinvolved_entities(name)) return result def find_involved_entities(self, name=None): root = self.root_execution_context() return root.find_subinvolved_entities(name) # involved_entities end # created_entity start def created_entity(self, name, index=-1): result = self.get_created_entity(name, index) if result is not None: return result result = self.find_created_entity(name, index) if result: return result[0] return None def get_created_entity(self, name, index=-1): if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) result = [e for e in self.getproperty(name) if e in self.createds] if result: return result[index] collection = self.get_created_collection(name, index) if collection: return collection[0] return None def find_subcreated_entity(self, name, index=-1): result = self.get_created_entity(name, index) if result is not None: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subcreated_entity(name, index)) return result def find_created_entity(self, name, index=-1): root = self.root_execution_context() return root.find_subcreated_entity(name, index) # created_entity end # created_entities start def created_entities(self, name=None): result = self.get_created_entities(name) if result: return result result = self.find_created_entities(name) return result def get_created_entities(self, name=None): if name is None: return list(self.createds) if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) result = [e for e in self.getproperty(name) if e in self.createds] return result return [] def find_created_entities(self, name=None): root = self.root_execution_context() result_created = root.all_createds() result = [e for e in root.find_involved_entities(name) \ if e in result_created] return result # created_entities end # has relation_entity start def has_relation(self, value, name=None): if self.has_localrelation(value, name): return True return self.has_globalrelation(value, name) def has_subrelation(self, value, name=None): if self.has_localrelation(value, name): return True for sec in self.sub_execution_contexts: if sec.has_subrelation(value, name): return True return False def has_globalrelation(self, value, name=None): root = self.root_execution_context() return root.has_subrelation(value, name) def has_localrelation(self, value, name=None): if name is None: return value in self.involveds entities = self.get_involved_entities(name) if entities and value in entities: return True return False # has relation_entity end #collections def add_involved_collection(self, name, values, type='involved'): prop_name = name index_key = name + '_index' if not hasattr(self, index_key): self.add_data(index_key, 0) index = self.get_localdata(index_key) + 1 self.add_data(index_key, index) name = name + '_' + str(index) for value in values: self.addtoproperty('involveds', value) if name in self.dynamic_properties_def: 
self._init_property(name, self.dynamic_properties_def[name]) self.addtoproperty(name, value) else: self.properties_names.append((prop_name, type)) self.dynamic_properties_def[name] = (SHARED_MULTIPLE, 'involvers', True) self._init_property(name, self.dynamic_properties_def[name]) self.addtoproperty(name, value) self._reindex() def remove_collection(self, name, values): index_key = name + '_index' if not hasattr(self, index_key): return index = self.get_localdata(index_key) name = name + '_' + str(index) for value in values: self.remove_entity(name, value) def add_created_collection(self, name, values): for value in values: self.addtoproperty('createds', value) self.add_involved_collection(name, values, 'created') # involved_collection start def involved_collection(self, name, index=-1): result = self.get_involved_collection(name, index) if result: return result result = self.find_involved_collection(name, index) if result: return result[0] return [] def get_involved_collection(self, name, index=-1): index_key = name + '_index' if not hasattr(self, index_key): return [] if index == -1: index = self.get_localdata(index_key) elif index > self.get_localdata(index_key): return [] name = name + '_' + str(index) if name in self.dynamic_properties_def: self._init_property(name, self.dynamic_properties_def[name]) return self.getproperty(name) return [] def find_subinvolved_collection(self, name, index=-1): result = self.get_involved_collection(name, index) if result: return [result] else: for sec in self.sub_execution_contexts: result.extend(sec.find_subinvolved_collection(name, index)) return result def find_involved_collection(self, name, index=-1): root = self.root_execution_context() return root.find_subinvolved_collection(name, index) # involved_collection end # involved_collections start def involved_collections(self, name=None): result = self.get_involved_collections(name) if result: return result result = self.find_involved_collections(name) return result def get_involved_collections(self, name=None): if name is None: return list(self.involveds) index_key = name + '_index' result = [] if hasattr(self, index_key): for index in range(self.get_localdata(index_key)): result.append(self.get_involved_collection(name, (index + 1))) return result def find_subinvolved_collections(self, name=None): result = self.get_involved_collections(name) if result: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subinvolved_collections(name)) return result def find_involved_collections(self, name=None): root = self.root_execution_context() return root.find_subinvolved_collections(name) # involved_collections end # created_collection start def created_collection(self, name, index=-1): result = self.get_created_collection(name, index) if result: return result result = self.find_created_collection(name, index) if result: return result[0] return [] def get_created_collection(self, name, index=-1): collections = self.get_involved_collection(name, index) if not collections: return [] result = [e for e in collections if e in self.createds] return result def find_subcreated_collection(self, name, index=-1): result = self.get_created_collection(name, index) if result: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subcreated_collection(name, index)) return result def find_created_collection(self, name, index=-1): root = self.root_execution_context() return root.find_subcreated_collection(name, index) # created_collection 
end # created_collections start def created_collections(self, name=None): result = self.get_created_collections(name) if result: return result result = self.find_created_collections(name) return result def get_created_collections(self, name=None): if name is None: return list(self.createds) index_key = name + '_index' result = [] if hasattr(self, index_key): for index in range(self.get_localdata(index_key)): result.append(self.get_created_collection(name, (index + 1))) return result def find_subcreated_collections(self, name): result = self.get_created_collections(name) if result: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subcreated_collections(name)) return result def find_created_collections(self, name=None): root = self.root_execution_context() return root.find_subcreated_collections(name) # created_collections end #Data def add_data(self, name, data): if not hasattr(self, name): setattr(self, name, PersistentList()) getattr(self, name).append(data) def get_data(self, name, index=-1): data = self.get_localdata(name, index) if data is not None: return data datas = self.find_data(name, index) return datas[0] def get_localdata(self, name, index=-1): if hasattr(self, name): datas = getattr(self, name) if index == -1 or index < len(datas): return getattr(self, name)[index] return None def find_subdata(self, name, index=-1): result = self.get_localdata(name, index) if result is not None: return [result] else: result = [] for sec in self.sub_execution_contexts: result.extend(sec.find_subdata(name, index)) return result def find_data(self, name, index=-1): root = self.root_execution_context() return root.find_subdata(name, index)
def checkTheWorld(self): # Test constructors u = PersistentList() u0 = PersistentList(l0) u1 = PersistentList(l1) u2 = PersistentList(l2) uu = PersistentList(u) uu0 = PersistentList(u0) uu1 = PersistentList(u1) uu2 = PersistentList(u2) v = PersistentList(tuple(u)) class OtherList: def __init__(self, initlist): self.__data = initlist def __len__(self): return len(self.__data) def __getitem__(self, i): return self.__data[i] v0 = PersistentList(OtherList(u0)) vv = PersistentList("this is also a sequence") # Test __repr__ eq = self.assertEqual eq(str(u0), str(l0), "str(u0) == str(l0)") eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") eq(repr(u2), repr(l2), "repr(u2) == repr(l2)") # Test __cmp__ and __len__ # Py3: No cmp() or __cmp__ anymore. if PY2: def mycmp(a, b): r = cmp(a, b) if r < 0: return -1 if r > 0: return 1 return r all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] for a in all: for b in all: eq(mycmp(a, b), mycmp(len(a), len(b)), "mycmp(a, b) == mycmp(len(a), len(b))") # Test __getitem__ for i in range(len(u2)): eq(u2[i], i, "u2[i] == i") # Test __setitem__ uu2[0] = 0 uu2[1] = 100 try: uu2[2] = 200 except IndexError: pass else: self.fail("uu2[2] shouldn't be assignable") # Test __delitem__ del uu2[1] del uu2[0] try: del uu2[0] except IndexError: pass else: self.fail("uu2[0] shouldn't be deletable") # Test __getslice__ for i in range(-3, 4): eq(u2[:i], l2[:i], "u2[:i] == l2[:i]") eq(u2[i:], l2[i:], "u2[i:] == l2[i:]") for j in range(-3, 4): eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]") # Test __setslice__ for i in range(-3, 4): u2[:i] = l2[:i] eq(u2, l2, "u2 == l2") u2[i:] = l2[i:] eq(u2, l2, "u2 == l2") for j in range(-3, 4): u2[i:j] = l2[i:j] eq(u2, l2, "u2 == l2") uu2 = u2[:] uu2[:0] = [-2, -1] eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]") uu2[0:] = [] eq(uu2, [], "uu2 == []") # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") for i in min(u2) - 1, max(u2) + 1: self.assertTrue(i not in u2, "i not in u2") # Test __delslice__ uu2 = u2[:] del uu2[1:2] del uu2[0:1] eq(uu2, [], "uu2 == []") uu2 = u2[:] del uu2[1:] del uu2[:1] eq(uu2, [], "uu2 == []") # Test __add__, __radd__, __mul__ and __rmul__ #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") self.assertTrue(u2 == u2 * 1 == 1 * u2, "u2 == u2*1 == 1*u2") self.assertTrue(u2 + u2 == u2 * 2 == 2 * u2, "u2+u2 == u2*2 == 2*u2") self.assertTrue(u2 + u2 + u2 == u2 * 3 == 3 * u2, "u2+u2+u2 == u2*3 == 3*u2") # Test append u = u1[:] u.append(1) eq(u, u2, "u == u2") # Test insert u = u2[:] u.insert(0, -1) eq(u, [-1, 0, 1], "u == [-1, 0, 1]") # Test pop u = PersistentList([0, -1, 1]) u.pop() eq(u, [0, -1], "u == [0, -1]") u.pop(0) eq(u, [-1], "u == [-1]") # Test remove u = u2[:] u.remove(1) eq(u, u1, "u == u1") # Test count u = u2 * 3 eq(u.count(0), 3, "u.count(0) == 3") eq(u.count(1), 3, "u.count(1) == 3") eq(u.count(2), 0, "u.count(2) == 0") # Test index eq(u2.index(0), 0, "u2.index(0) == 0") eq(u2.index(1), 1, "u2.index(1) == 1") try: u2.index(2) except ValueError: pass else: self.fail("expected ValueError") # Test reverse u = u2[:] u.reverse() eq(u, [1, 0], "u == [1, 0]") u.reverse() eq(u, u2, "u == u2") # Test sort u = PersistentList([1, 0]) u.sort() eq(u, u2, "u == u2") # Test extend u = u1[:] u.extend(u2) eq(u, u1 + u2, "u == u1 + u2")
class SelectedPeriodsSchedule(Persistent, Schedule):
    implements(interfaces.ISelectedPeriodsSchedule)

    # XXX: think about storing intid here
    #      or maybe better - a relationship
    timetable = None

    _periods = None

    consecutive_periods_as_one = False

    def __init__(self, timetable, *args, **kw):
        Schedule.__init__(self, *args, **kw)
        self.timetable = timetable
        self._periods = PersistentList()

    @property
    def periods(self):
        result = []
        for day in self.timetable.periods.templates.values():
            result.extend([period for period in day.values()
                           if self.hasPeriod(period)])
        return result

    def periodKey(self, period):
        day = period.__parent__
        return (day.__name__, period.__name__)

    def hasPeriod(self, period):
        key = self.periodKey(period)
        return key in self._periods

    def addPeriod(self, period):
        key = self.periodKey(period)
        if key not in self._periods:
            self._periods.append(key)

    def removePeriod(self, period):
        key = self.periodKey(period)
        if key in self._periods:
            self._periods.remove(key)

    def iterMeetings(self, date, until_date=None):
        if self.timetable is None:
            return
        meetings = iterMeetingsInTimezone(
            self.timetable, self.timezone, date, until_date=until_date)
        selected_periods = list(self.periods)
        last_meeting = None
        for orig_meeting in meetings:
            # XXX: proxy issues may breed here
            if orig_meeting.period in selected_periods:
                meeting = orig_meeting
                if (self.consecutive_periods_as_one
                        and last_meeting is not None
                        and meeting.dtstart.date() == last_meeting.dtstart.date()):
                    period_ids = meeting.period.__parent__.keys()
                    idx = period_ids.index(meeting.period.__name__)
                    if (idx > 0 and
                            period_ids[idx - 1] == last_meeting.period.__name__):
                        meeting = meeting.clone(
                            meeting_id=last_meeting.meeting_id)
                last_meeting = orig_meeting
                yield meeting
class NotificationTool(Folder): """ """ meta_type = core_constants.METATYPE_NOTIFICATIONTOOL icon = 'misc_/NaayaCore/NotificationTool.gif' meta_types = () all_meta_types = meta_types security = ClassSecurityInfo() # default configuration settings default_config = { 'admin_on_error': True, 'admin_on_edit': True, 'enable_instant': True, 'enable_daily': True, 'enable_anonymous': False, # Enable anonymous notifications 'daily_hour': 0, 'enable_weekly': True, 'weekly_day': 1, # 1 = monday, 7 = sunday 'weekly_hour': 0, 'enable_monthly': True, 'monthly_day': 1, # 1 = first day of the month 'monthly_hour': 0, 'notif_content_types': [], } def __init__(self, id, title): """ """ self.id = id self.title = title self.config = PersistentDict(self.default_config) self.timestamps = PersistentDict() # Confirmations list self.pending_anonymous_subscriptions = PersistentList() def get_config(self, key): return self.config.get(key) def get_location_link(self, location): if location: return self.restrictedTraverse(location, self.getSite()).absolute_url() else: return self.getSite().absolute_url() def _validate_subscription(self, **kw): """ Validate add/edit subscription for authorized and anonymous users """ if (kw['notif_type'] not in self.available_notif_types(kw['location']) and not (kw['notif_type'] == 'administrative' and self.checkPermissionPublishObjects())): raise i18n_exception(ValueError, 'Subscribing to ${notif_type} ' 'notifications in "${location}" not allowed', location=kw['location'] or self.getSite().title, notif_type=kw['notif_type']) try: obj = self.getSite().restrictedTraverse(kw['location']) except: raise i18n_exception(ValueError, 'This path is invalid or protected') try: subscription_container = ISubscriptionContainer(obj) except: raise i18n_exception(ValueError, 'Cannot subscribe to this folder') if kw.get('anonymous', False): # Check if subscription exists for this anonymous subscriber if not is_valid_email(kw.get('email', '')): raise i18n_exception( ValueError, 'Your e-mail address does not appear ' 'to be valid.') for id, subscription in subscription_container.list_with_keys(): # Normal subscriptions don't have e-mail if isinstance(subscription, AnonymousSubscription): if (subscription.email == kw['email'] and subscription.notif_type == kw['notif_type'] and subscription.lang == kw['lang']): raise i18n_exception(ValueError, 'Subscription already exists') def _sitemap_dict(self, form): """ Compose a sitemap dict """ node = form.get('node', '') if not node or node == '/': node = '' def traverse(objects, level=0, stop_level=2, exclude_root=False): """ Create a dict with node properties and children. This is a fixed level recursion. On some sites there are a lot of objects so we don't need to get the whole tree. 
""" res = [] for ob in objects: if ISubscriptionTarget.providedBy(ob) is False: continue children_objects = [] if level != stop_level: # Stop if the level is reached # Create a list of object's children if hasattr(ob, 'objectValues'): # Get only naaya container objects for child in ob.objectValues( self.get_naaya_containers_metatypes()): # Skip unsubmited/unapproved if not getattr(child, 'approved', False): continue elif not getattr(child, 'submitted', False): continue else: children_objects.append(child) if hasattr(ob, 'approved'): icon = ob.approved and ob.icon or ob.icon_marked else: icon = ob.icon children = traverse(children_objects, level + 1, stop_level) if exclude_root: # Return only the children if this is set return children res.append({ 'data': { 'title': self.utStrEscapeHTMLTags( self.utToUtf8(ob.title_or_id())), 'icon': icon }, 'attributes': { 'title': path_in_site(ob) }, 'children': children }) return res if node == '': tree_dict = traverse([self.getSite()]) else: tree_dict = traverse([self.restrictedTraverse(node)], exclude_root=True) return tree_dict security.declarePublic('sitemap') def sitemap(self, REQUEST=None, **kw): """ Return a json (for Ajax tree) representation of published objects marked with `ISubscriptionTarget` including the portal organized in a tree (sitemap) """ form = {} if REQUEST is not None: form = REQUEST.form REQUEST.RESPONSE.setHeader('content-type', 'application/json') else: form.update(kw) return json.dumps(self._sitemap_dict(form)) security.declarePrivate('add_account_subscription') def add_account_subscription(self, user_id, location, notif_type, lang, content_types=[]): """ Subscribe the user `user_id` """ self._validate_subscription(user_id=user_id, location=location, notif_type=notif_type, lang=lang, content_types=content_types) try: self.remove_account_subscription(user_id, location, notif_type, lang) except ValueError: pass obj = self.getSite().restrictedTraverse(location) subscription_container = ISubscriptionContainer(obj) subscription = AccountSubscription(user_id, notif_type, lang, content_types) subscription_container.add(subscription) security.declarePrivate('add_anonymous_subscription') def add_anonymous_subscription(self, **kw): """ Handle anonymous users """ self._validate_subscription(anonymous=True, **kw) subscription = AnonymousSubscription(**kw) # Add to temporary container self.pending_anonymous_subscriptions.append(subscription) # Send email email_tool = self.getSite().getEmailTool() email_from = email_tool.get_addr_from() email_template = EmailPageTemplateFile('emailpt/confirm.zpt', globals()) email_data = email_template.render_email(**{ 'key': subscription.key, 'here': self }) email_to = subscription.email email_tool.sendEmail(email_data['body_text'], email_to, email_from, email_data['subject']) security.declarePrivate('remove_account_subscription') def remove_account_subscription(self, user_id, location, notif_type, lang, content_types=None): obj = self.getSite().restrictedTraverse(location) subscription_container = ISubscriptionContainer(obj) n = utils.match_account_subscription(subscription_container, user_id, notif_type, lang, content_types) if n is None: raise ValueError('Subscription not found') subscription_container.remove(n) security.declarePrivate('unsubscribe_links_html') unsubscribe_links_html = PageTemplateFile("emailpt/unsubscribe_links.zpt", globals()) security.declarePrivate('remove_anonymous_subscription') def remove_anonymous_subscription(self, email, location, notif_type, lang): try: obj = 
self.getSite().restrictedTraverse(location) except: raise i18n_exception(ValueError, 'Invalid location') try: subscription_container = ISubscriptionContainer(obj) except: raise i18n_exception(ValueError, 'Invalid container') anonymous_subscriptions = [ (n, s) for n, s in subscription_container.list_with_keys() if hasattr(s, 'email') ] subscriptions = filter( lambda s: (s[1].email == email and s[1].location == location and s[ 1].notif_type == notif_type), anonymous_subscriptions) if len(subscriptions) == 1: subscription_container.remove(subscriptions[0][0]) else: raise i18n_exception(ValueError, 'Subscription not found') security.declareProtected(view, 'available_notif_types') def available_notif_types(self, location=''): if self.config['enable_instant']: yield 'instant' if self.config['enable_daily']: yield 'daily' if self.config['enable_weekly']: yield 'weekly' if self.config['enable_monthly']: yield 'monthly' security.declarePrivate('notify_maintainer') def notify_maintainer(self, ob, folder, **kwargs): """ Process and notify by email that B{p_object} has been uploaded into the B{p_folder}. """ auth_tool = self.getSite().getAuthenticationTool() emails = self.getMaintainersEmails(ob) person = self.REQUEST.AUTHENTICATED_USER.getUserName() if len(emails) > 0: maintainers_data = {} for email in emails: maintainers_data[email] = { 'ob': ob, 'here': self, 'person': auth_tool.name_from_userid(person), 'ob_edited': kwargs.get('ob_edited'), 'approved': ob.approved, 'container_basket': '%s/basketofapprovals_html' % folder.absolute_url(), } notif_logger.info('Maintainer notifications on %r', ofs_path(ob)) template = self._get_template('maintainer') self._send_notifications(maintainers_data, template) security.declarePrivate('notify_comment_maintainer') def notify_comment_maintainer(self, comment, parent, **kwargs): """ Process and notify by email that a comment B{comemnt} has been added to the object B{parent}. 
""" auth_tool = self.getSite().getAuthenticationTool() emails = self.getMaintainersEmails(parent) if len(emails) > 0: maintainers_data = {} for email in emails: maintainers_data[email] = { 'parent': parent, 'here': self, 'comment': comment, 'person': auth_tool.name_from_userid(comment.author), 'container_basket': '%s/basketofapprovals_html' % parent.absolute_url(), } notif_logger.info('Maintainer comment notifications on %r', ofs_path(parent)) template = self._get_template('maintainer') self._send_notifications(maintainers_data, template) security.declarePrivate('notify_administrative') def notify_administrative(self, ob, user_id, ob_edited=False): """ send administrative notifications because object `ob` was added or edited by the user `user_id` """ auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, ob, notif_type='administrative', **{ 'person': auth_tool.name_from_userid(user_id), 'ob_edited': ob_edited, 'approved': ob.approved, 'container_basket': '%s/basketofapprovals_html' % ob.aq_parent.absolute_url(), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Administrative notifications on %r', ofs_path(ob)) template = self._get_template('administrative') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_comment_administrative') def notify_comment_administrative(self, comment, parent, user_id): """ send administrative notifications because a comment was added to object `ob` by the user `user_id` """ auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, parent, notif_type='administrative', **{ 'comment': comment, 'parent': parent, 'here': self, 'person': auth_tool.name_from_userid(user_id), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Administrative comment notifications on %r', ofs_path(parent)) template = self._get_template('administrative') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_instant') def notify_instant(self, ob, user_id, ob_edited=False): """ send instant notifications because object `ob` was changed by the user `user_id` """ if not self.config['enable_instant']: return # Don't send notifications if the object is unapproved, but store them # into a queue to send them later when it becomes approved if not ob.approved: return auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, ob, **{ 'person': auth_tool.name_from_userid(user_id), 'ob_edited': ob_edited, }) if len(subscribers_data.keys()) > 0: notif_logger.info('Instant notifications on %r', ofs_path(ob)) template = self._get_template('instant') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_comment_instant') def notify_comment_instant(self, comment, parent, user_id): """ send instant notifications because a comment was added to object `ob` by the user `user_id` """ if not self.config['enable_instant']: return # Don't send notifications if the object is unapproved, but store them # into a queue to send them later when it becomes approved if not parent.approved: return auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, parent, **{ 'comment': comment, 'parent': parent, 'person': auth_tool.name_from_userid(user_id), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Comment instant notifications on %r', ofs_path(parent)) template = self._get_template('instant') 
self._send_notifications(subscribers_data, template) security.declarePrivate('notify_account_modification') def notify_account_modification(self, email, obj, username=None, new_roles=[], removed_roles=[]): """ Send notification that the user received or lost one or more roles in the specified location """ email_data = { email: { 'new_roles': new_roles, 'removed_roles': removed_roles, 'username': username, 'obj': obj, } } notif_logger.info('Account modification notification on %s' % self.getSite().getId()) template = self._get_template('account_modified') self._send_notifications(email_data, template) def _get_template(self, name): template = self._getOb('emailpt_%s' % name, None) if template is not None: return template.render_email template = self._getOb(name, None) if template is not None: return template.render_email template = email_templates.get(name, None) if template is not None: return template.render_email raise ValueError('template for %r not found' % name) def _send_notifications(self, messages_by_email, template): """ Send the notifications described in the `messages_by_email` data structure, using the specified EmailTemplate. `messages_by_email` should be a dictionary, keyed by email address. The values should be dictionaries suitable to be passed as kwargs (options) to the template. """ portal = self.getSite() email_tool = portal.getEmailTool() addr_from = email_tool.get_addr_from() for addr_to, kwargs in messages_by_email.iteritems(): translate = self.portal_i18n.get_translation kwargs.update({'portal': portal, '_translate': translate}) mail_data = template(**kwargs) notif_logger.info('.. sending notification to %r', addr_to) utils.send_notification(email_tool, addr_from, addr_to, mail_data['subject'], mail_data['body_text']) def _send_newsletter(self, notif_type, when_start, when_end): """ We'll look in the ``Products.Naaya.NySite.getActionLogger`` for object creation/modification log entries. Then we'll send notifications for the period between `when_start` and `when_end` using the `notif_type` template. """ notif_logger.info( 'Notifications newsletter on site %r, type %r, ' 'from %s to %s', ofs_path(self.getSite()), notif_type, when_start, when_end) objects_by_email = {} langs_by_email = {} subscriptions_by_email = {} anonymous_users = {} for log_type, ob in utils.get_modified_objects(self.getSite(), when_start, when_end): notif_logger.info('.. modified object: %r', ofs_path(ob)) for subscription in utils.fetch_subscriptions(ob, inherit=True): if subscription.notif_type != notif_type: continue if not subscription.check_permission(ob): continue email = subscription.get_email(ob) if email is None: continue content_types = getattr(subscription, 'content_types', []) if content_types and ob.meta_type not in content_types: continue notif_logger.info('.. .. 
sending newsletter to %r', email) objects_by_email.setdefault(email, []).append({ 'ob': ob, 'type': log_type, }) langs_by_email[email] = subscription.lang subscriptions_by_email[email] = subscription anonymous_users[email] = isinstance(subscription, AnonymousSubscription) messages_by_email = {} for email in objects_by_email: messages_by_email[email] = { 'objs': objects_by_email[email], '_lang': langs_by_email[email], 'subscription': subscriptions_by_email[email], 'here': self, 'anonymous': anonymous_users[email] } template = self._get_template(notif_type) self._send_notifications(messages_by_email, template) def _cron_heartbeat(self, when): transaction.commit() # commit earlier stuff; fresh transaction transaction.get().note('notifications cron at %s' % ofs_path(self)) # Clean temporary subscriptions after a week: if self.config.get('enable_anonymous', False): a_week_ago = when - timedelta(weeks=1) for tmp_subscription in self.pending_anonymous_subscriptions[:]: if tmp_subscription.datetime <= a_week_ago: self.pending_anonymous_subscriptions.remove( tmp_subscription) # daily newsletter ### if self.config['enable_daily']: # calculate the most recent daily newsletter time daily_time = time(hour=self.config['daily_hour']) latest_daily = datetime.combine(when.date(), daily_time) if latest_daily > when: latest_daily -= timedelta(days=1) # check if we should send a daily newsletter prev_daily = self.timestamps.get('daily', when - timedelta(days=1)) if prev_daily < latest_daily < when: self._send_newsletter('daily', prev_daily, when) self.timestamps['daily'] = when # weekly newsletter ### if self.config['enable_weekly']: # calculate the most recent weekly newsletter time weekly_time = time(hour=self.config['daily_hour']) t = datetime.combine(when.date(), weekly_time) days_delta = self.config['weekly_day'] - t.isoweekday() latest_weekly = t + timedelta(days=days_delta) if latest_weekly > when: latest_weekly -= timedelta(weeks=1) # check if we should send a weekly newsletter prev_weekly = self.timestamps.get('weekly', when - timedelta(weeks=1)) if prev_weekly < latest_weekly < when: self._send_newsletter('weekly', prev_weekly, when) self.timestamps['weekly'] = when # monthly newsletter ### if self.config['enable_monthly']: # calculate the most recent monthly newsletter time monthly_time = time(hour=self.config['monthly_hour']) the_day = utils.set_day_of_month(when.date(), self.config['monthly_day']) latest_monthly = datetime.combine(the_day, monthly_time) if latest_monthly > when: latest_monthly = utils.minus_one_month(latest_monthly) # check if we should send a monthly newsletter prev_monthly = self.timestamps.get('monthly', utils.minus_one_month(when)) if prev_monthly < latest_monthly < when: self._send_newsletter('monthly', prev_monthly, when) self.timestamps['monthly'] = when transaction.commit() # make sure our timestamp updates are saved def index_html(self, RESPONSE): """ redirect to admin page """ RESPONSE.redirect(self.absolute_url() + '/my_subscriptions_html') security.declareProtected(view, 'my_subscriptions_html') my_subscriptions_html = NaayaPageTemplateFile( 'zpt/index', globals(), 'naaya.core.notifications.my_subscriptions') security.declarePrivate('list_user_subscriptions') def user_subscriptions(self, user, cutoff_level=None): """ Returns all user subscriptions in the portal. Use with caution as this iterates almost all the objects in site. You can use `cutoff_level` to limit the depth. 
""" out = [] user_id = user.getId() for obj, n, subscription in utils.walk_subscriptions( self.getSite(), cutoff_level): if not isinstance(subscription, AccountSubscription): continue if subscription.user_id != user_id: continue out.append({ 'object': obj, 'notif_type': subscription.notif_type, 'content_types': getattr(subscription, 'content_types', []), 'lang': subscription.lang }) return out security.declareProtected(view, 'user_not_found') def user_not_found(self, REQUEST): """ Returns True if the user is not Anonymous, but is still not found by the AuthenticationTool (i.e. is maybe defined in the Zope root) """ user = REQUEST.AUTHENTICATED_USER if not isinstance(user, basestring): # with LDAP authentication, user is LDAP user instance user = user.id acl_tool = self.getAuthenticationTool() if acl_tool.get_user_with_userid(user) is None: return True security.declareProtected(view, 'list_my_subscriptions') def list_my_subscriptions(self, REQUEST): """ Returns a list of mappings (location, notif_type, lang) for all subscriptions of logged-in user """ user = REQUEST.AUTHENTICATED_USER if user.getId() is None and not self.config.get( 'enable_anonymous', False): raise Unauthorized # to force login subscriptions = self.user_subscriptions(user) for subscription in subscriptions: subscription['location'] = path_in_site(subscription['object']) del subscription['object'] return subscriptions security.declareProtected(view, 'my_first_subscription') def get_location_subscription(self, location, notif_type=None): """ Returns the first of the authenticated user's subscriptions in location """ for subscription in self.list_my_subscriptions(self.REQUEST): if subscription['location'] == location: if notif_type: if subscription['notif_type'] == notif_type: return subscription else: return subscription security.declareProtected(view, 'subscribe_me') def subscribe_me(self, REQUEST, location, notif_type, lang=None, content_types=[]): """ add subscription for currently-logged-in user """ # Even if some content types were selected (by turning off javascript) # they should be ignored, no filtering in administrative notifications if notif_type == 'administrative': content_types = [] if isinstance(content_types, basestring): content_types = [content_types] if lang is None: lang = self.gl_get_selected_language() REQUEST.form['lang'] = lang user_id = REQUEST.AUTHENTICATED_USER.getId() if location == '/': location = '' if user_id is None and not self.config.get('enable_anonymous', False): raise Unauthorized # to force login try: if user_id: self.add_account_subscription(user_id, location, notif_type, lang, content_types) if content_types: self.setSessionInfoTrans( 'You will receive ${notif_type} notifications' ' for any changes in "${location}" for objects of ' 'types ${content_types}.', notif_type=notif_type, location=location or self.getSite().title, content_types=', '.join(content_types)) else: self.setSessionInfoTrans( 'You will receive ${notif_type} notifications' ' for any changes in "${location}".', notif_type=notif_type, location=location) else: self.add_anonymous_subscription(**dict(REQUEST.form)) self.setSessionInfoTrans( 'An activation e-mail has been sent to ${email}. 
' 'Follow the instructions to subscribe to ${notif_type} ' 'notifications for any changes in "${location}".', notif_type=notif_type, location=location, content_types=content_types, email=REQUEST.form.get('email')) except ValueError, msg: self.setSessionErrors([unicode(msg)]) return REQUEST.RESPONSE.redirect(self.absolute_url() + '/my_subscriptions_html')
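# A minimal, standalone sketch (not part of the tool above) of the scheduling
# test used by _cron_heartbeat for the daily newsletter: a newsletter is due
# when the most recent scheduled slot falls strictly between the previous send
# time and `when`. should_send_daily and daily_hour are illustrative names only.
from datetime import datetime, time, timedelta

def should_send_daily(when, prev_daily, daily_hour):
    # most recent daily slot at or before `when`
    latest_daily = datetime.combine(when.date(), time(hour=daily_hour))
    if latest_daily > when:
        latest_daily -= timedelta(days=1)
    return prev_daily < latest_daily < when

# With daily_hour=9 and a previous send yesterday at 09:05, the check is False
# at 08:30 today (the only slot at or before then was yesterday's 09:00, already
# covered) and True at 09:30 today (today's 09:00 slot is newer than the last send).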
class Person(User, SearchableEntity, CorrelableEntity, Debatable): """Person class""" type_title = _('Person') icon = 'icon glyphicon glyphicon-user' #'icon novaideo-icon icon-user' templates = { 'default': 'novaideo:views/templates/person_result.pt', 'bloc': 'novaideo:views/templates/person_bloc.pt', 'small': 'novaideo:views/templates/small_person_result.pt', 'popover': 'novaideo:views/templates/person_popover.pt', 'card': 'novaideo:views/templates/person_card.pt', 'header': 'novaideo:views/templates/person_header.pt', } default_picture = 'novaideo:static/images/user100.png' name = renamer() tokens = CompositeMultipleProperty('tokens') tokens_ref = SharedMultipleProperty('tokens_ref') organization = SharedUniqueProperty('organization', 'members') events = SharedMultipleProperty('events', 'author') picture = CompositeUniqueProperty('picture') cover_picture = CompositeUniqueProperty('cover_picture') ideas = SharedMultipleProperty('ideas', 'author') selections = SharedMultipleProperty('selections') working_groups = SharedMultipleProperty('working_groups', 'members') wg_participations = SharedMultipleProperty('wg_participations', 'wating_list_participation') old_alerts = SharedMultipleProperty('old_alerts') following_channels = SharedMultipleProperty('following_channels', 'members') folders = SharedMultipleProperty('folders', 'author') questions = SharedMultipleProperty('questions', 'author') challenges = SharedMultipleProperty('challenges', 'author') ballots = CompositeMultipleProperty('ballots') mask = SharedUniqueProperty('mask', 'member') def __init__(self, **kwargs): self.branches = PersistentList() self.keywords = PersistentList() super(Person, self).__init__(**kwargs) kwargs.pop('password', None) self.set_data(kwargs) self.set_title() self.last_connection = datetime.datetime.now(tz=pytz.UTC) self._read_at = OOBTree() self.guide_tour_data = PersistentDict({}) self.confidence_index = 0 self._notes = OOBTree() self.allocated_tokens = OOBTree() self.len_allocated_tokens = PersistentDict({}) self.reserved_tokens = PersistentList([]) self._submited_at = OOBTree() self._reported_at = OOBTree() def __setattr__(self, name, value): super(Person, self).__setattr__(name, value) if name == 'organization' and value: self.init_contents_organizations() def get_len_tokens(self, root=None, exclude_reserved_tokens=False): root = root or getSite() return root.tokens_mini if exclude_reserved_tokens \ else root.tokens_mini + len(self.reserved_tokens) def get_len_evaluations(self, exclude_reserved_tokens=False): total = self.len_allocated_tokens.get(Evaluations.support, 0) + \ self.len_allocated_tokens.get(Evaluations.oppose, 0) if exclude_reserved_tokens: return total - len([ o for o in self.reserved_tokens if o in self.allocated_tokens ]) return total def get_len_free_tokens(self, root=None, exclude_reserved_tokens=False): root = root or getSite() return self.get_len_tokens(root, exclude_reserved_tokens) - \ self.get_len_evaluations(exclude_reserved_tokens) def has_token(self, obj=None, root=None): root = root or getSite() obj_oid = get_oid(obj, None) if obj_oid and obj_oid in self.reserved_tokens: return obj_oid not in self.allocated_tokens return self.get_len_free_tokens(root, True) > 0 def add_token(self, obj, evaluation_type, root=None): if self.has_token(obj, root): self.allocated_tokens[get_oid(obj)] = evaluation_type self.len_allocated_tokens.setdefault(evaluation_type, 0) self.len_allocated_tokens[evaluation_type] += 1 def remove_token(self, obj): obj_oid = get_oid(obj) if obj_oid in 
self.allocated_tokens: evaluation_type = self.allocated_tokens.pop(obj_oid) self.len_allocated_tokens.setdefault(evaluation_type, 0) self.len_allocated_tokens[evaluation_type] -= 1 def add_reserved_token(self, obj): obj_oid = get_oid(obj) if obj_oid not in self.reserved_tokens: self.reserved_tokens.append(obj_oid) def remove_reserved_token(self, obj): obj_oid = get_oid(obj) if obj_oid in self.reserved_tokens: self.reserved_tokens.remove(obj_oid) def evaluated_objs(self, evaluation_type=None): if evaluation_type: return [ get_obj(key) for value, key in self.allocated_tokens.byValue( evaluation_type) ] return [get_obj(key) for key in self.allocated_tokens.keys()] def evaluated_objs_ids(self, evaluation_type=None): if evaluation_type: return [ key for value, key in self.allocated_tokens.byValue( evaluation_type) ] return list(self.allocated_tokens.keys()) def init_contents_organizations(self): novaideo_catalog = find_catalog('novaideo') dace_catalog = find_catalog('dace') organizations_index = novaideo_catalog['organizations'] object_authors_index = novaideo_catalog['object_authors'] object_provides_index = dace_catalog['object_provides'] query = object_authors_index.any([get_oid(self)]) & \ object_provides_index.any( [Iidea.__identifier__, IProposal.__identifier__]) & \ organizations_index.any([0]) for entity in query.execute().all(): entity.init_organization() entity.reindex() def set_read_date(self, channel, date): self._read_at[get_oid(channel)] = date def get_read_date(self, channel): return self._read_at.get(get_oid(channel), datetime.datetime.now(tz=pytz.UTC)) def get_channel(self, user): all_channels = list(self.channels) all_channels.extend(list(getattr(user, 'channels', []))) for channel in all_channels: if user in channel.members and self in channel.members: return channel return None def addtoproperty(self, name, value, moving=None): super(Person, self).addtoproperty(name, value, moving) if name == 'selections': value.len_selections = getattr(value, 'len_selections', 0) value.len_selections += 1 def delfromproperty(self, name, value, moving=None): super(Person, self).delfromproperty(name, value, moving) if name == 'selections': value.len_selections = getattr(value, 'len_selections', 0) if value.len_selections > 0: value.len_selections -= 1 def set_title(self): if getattr(self, 'pseudonym', ''): self.title = self.pseudonym else: self.title = getattr(self, 'first_name', '') + ' ' + \ getattr(self, 'last_name', '') def add_note(self, user, context, note, date, time_constant): self._notes[date] = (get_oid(user), get_oid(context), note) self.calculate_confidence_index(time_constant) def get_questions(self, user): if user is self: return self.questions + getattr(self.mask, 'questions', []) return self.questions def get_ideas(self, user): if user is self: return self.ideas + getattr(self.mask, 'ideas', []) return self.ideas def get_working_groups(self, user): if user is self: return self.working_groups + getattr(self.mask, 'working_groups', []) return self.working_groups @property def proposals(self): return [wg.proposal for wg in self.working_groups] def get_proposals(self, user): if user is self: return self.proposals + getattr(self.mask, 'proposals', []) return self.proposals @property def contacts(self): return [s for s in self.selections if isinstance(s, Person)] @property def participations(self): result = [ p for p in list(self.proposals) if any(s in p.state for s in [ 'amendable', 'open to a working group', 'votes for publishing', 'votes for amendments' ]) ] return result def 
get_participations(self, user): if user is self: return self.participations + getattr(self.mask, 'participations', []) return self.participations @property def contents(self): result = [i for i in list(self.ideas) if i is i.current_version] result.extend(self.proposals) result.extend(self.questions) result.extend(self.challenges) result.extend(self.events) return result def get_contents(self, user): if user is self: return self.contents + getattr(self.mask, 'contents', []) return self.contents @property def active_working_groups(self): return [p.working_group for p in self.participations] def get_active_working_groups(self, user): if user is self: return self.active_working_groups + getattr( self.mask, 'active_working_groups', []) return self.active_working_groups def get_wg_participations(self, user): if user is self: return self.wg_participations + getattr(self.mask, 'wg_participations', []) return self.wg_participations @property def is_published(self): return 'active' in self.state @property def managed_organization(self): return get_objects_with_role(user=self, role='OrganizationResponsible') def get_confidence_index(self): return getattr(self, 'confidence_index', 0) def reindex(self): super(Person, self).reindex() root = getSite() self.__access_keys__ = PersistentList(generate_access_keys(self, root)) def get_picture_url(self, kind, default): if self.picture: img = getattr(self.picture, kind, None) if img: return img.url return default def get_more_contents_criteria(self): "return specific query, filter values" return None, None def set_organization(self, organization): current_organization = self.organization if organization: if current_organization is not organization: is_manager = current_organization and has_role( ('OrganizationResponsible', current_organization), self, ignore_superiors=True) if current_organization and is_manager: revoke_roles( self, (('OrganizationResponsible', current_organization), )) self.setproperty('organization', organization) elif current_organization: is_manager = has_role( ('OrganizationResponsible', current_organization), self, ignore_superiors=True) if is_manager: revoke_roles( self, (('OrganizationResponsible', current_organization), )) self.delfromproperty('organization', current_organization) @property def all_alerts(self): novaideo_catalog = find_catalog('novaideo') dace_catalog = find_catalog('dace') alert_keys_index = novaideo_catalog['alert_keys'] alert_exclude_keys_index = novaideo_catalog['alert_exclude_keys'] object_provides_index = dace_catalog['object_provides'] exclude = [str(get_oid(self))] if self.mask: exclude.append(str(get_oid(self.mask))) query = object_provides_index.any([IAlert.__identifier__]) & \ alert_keys_index.any(self.get_alerts_keys()) & \ alert_exclude_keys_index.notany(exclude) return query.execute() @property def alerts(self): old_alerts = [get_oid(a) for a in self.old_alerts] result = self.all_alerts def exclude(result_set, docids): filtered_ids = list(result_set.ids) for _id in docids: if _id in docids and _id in filtered_ids: filtered_ids.remove(_id) return result_set.__class__(filtered_ids, len(filtered_ids), result_set.resolver) return exclude(result, old_alerts) def get_alerts_keys(self): result = ['all', str(get_oid(self))] if self.mask: result.append(str(get_oid(self.mask))) return result def get_alerts(self, alerts=None, kind=None, subject=None, **kwargs): if alerts is None: alerts = self.alerts if kind: alerts = [a for a in alerts if a.is_kind_of(kind)] if subject: alerts = [a for a in alerts if subject in 
a.subjects] if kwargs: alerts = [a for a in alerts if a.has_args(**kwargs)] return alerts def calculate_confidence_index(self, time_constant): now = datetime.datetime.utcnow().timestamp() notes = np.array([v[2] for v in self._notes.values()]) dates = np.array([int(t.timestamp()) for t in self._notes.keys()]) time_c = time_constant * 86400 confidence_index = np.sum( np.dot(notes, np.exp(-np.log(2) * (now - dates) / time_c))) self.confidence_index = round(confidence_index, 1) @property def user_groups(self): groups = list(self.groups) if self.organization: groups.append(self.organization) if self.mask: groups.append(self.mask) return groups @property def user_locale(self): locale = getattr(self, 'locale', None) if not locale: locale = getSite(self).locale return locale def _init_mask(self, root): if not self.mask: mask = Mask() root.addtoproperty('masks', mask) self.setproperty('mask', mask) def get_mask(self, root=None): root = root if root else getSite() if not getattr(root, 'anonymisation', False): return self self._init_mask(root) return self.mask def add_submission(self, obj): now = datetime.datetime.now(tz=pytz.UTC) self._submited_at[now] = get_oid(obj) def add_report(self, obj): now = datetime.datetime.now(tz=pytz.UTC) self._reported_at[now] = get_oid(obj) def can_submit_idea(self, root=None): root = root if root else getSite() now = datetime.datetime.now(tz=pytz.UTC) monday = datetime.datetime.combine((now - datetime.timedelta(days=7)), datetime.time(0, 0, 0, tzinfo=pytz.UTC)) return len(self._submited_at.values(min=monday, max=now)) < getattr( root, 'nb_submission_maxi', 3) def can_report(self, root=None): root = root if root else getSite() now = datetime.datetime.now(tz=pytz.UTC) monday = datetime.datetime.combine((now - datetime.timedelta(days=7)), datetime.time(0, 0, 0, tzinfo=pytz.UTC)) return len(self._reported_at.values(min=monday, max=now)) < getattr( root, 'nb_reports_maxi', 3)
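# Hedged sketch of the weighting behind Person.calculate_confidence_index above:
# each note decays exponentially with age, halving every `time_constant` days
# (np.exp(-log(2) * age / time_c) with time_c = time_constant * 86400 seconds).
# The helper below is a standalone restatement, not the class method itself.
import numpy as np

def confidence_index(notes, ages_seconds, time_constant_days):
    time_c = float(time_constant_days) * 86400.0
    weights = np.exp(-np.log(2) * np.asarray(ages_seconds, dtype=float) / time_c)
    return round(float(np.sum(np.asarray(notes, dtype=float) * weights)), 1)

# A note given exactly one time constant ago counts for half its value:
assert confidence_index([1.0], [7 * 86400], 7) == 0.5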
class SelectedPeriodsSchedule(Persistent, Schedule):
    implements(interfaces.ISelectedPeriodsSchedule)

    # XXX: think about storing intid here
    #      or maybe better - a relationship
    timetable = None

    _periods = None

    consecutive_periods_as_one = False

    def __init__(self, timetable, *args, **kw):
        Schedule.__init__(self, *args, **kw)
        self.timetable = timetable
        self._periods = PersistentList()

    @property
    def periods(self):
        result = []
        for day in self.timetable.periods.templates.values():
            result.extend([period for period in day.values()
                           if self.hasPeriod(period)])
        return result

    def periodKey(self, period):
        day = period.__parent__
        return (day.__name__, period.__name__)

    def hasPeriod(self, period):
        key = self.periodKey(period)
        return key in self._periods

    def addPeriod(self, period):
        key = self.periodKey(period)
        if key not in self._periods:
            self._periods.append(key)

    def removePeriod(self, period):
        key = self.periodKey(period)
        if key in self._periods:
            self._periods.remove(key)

    def iterMeetings(self, date, until_date=None):
        if self.timetable is None:
            return
        meetings = iterMeetingsInTimezone(self.timetable, self.timezone,
                                          date, until_date=until_date)
        selected_periods = list(self.periods)
        last_meeting = None
        for orig_meeting in meetings:
            # XXX: proxy issues may breed here
            if orig_meeting.period in selected_periods:
                meeting = orig_meeting
                if (self.consecutive_periods_as_one and
                    last_meeting is not None and
                    meeting.dtstart.date() == last_meeting.dtstart.date()):
                    period_ids = meeting.period.__parent__.keys()
                    idx = period_ids.index(meeting.period.__name__)
                    if (idx > 0 and
                        period_ids[idx - 1] == last_meeting.period.__name__):
                        meeting = meeting.clone(
                            meeting_id=last_meeting.meeting_id)
                last_meeting = orig_meeting
                yield meeting
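# Hedged illustration of the "consecutive periods as one" test in iterMeetings
# above: two selected meetings on the same date collapse into one logical
# meeting when the second period directly follows the first in the day's
# ordered period list. is_consecutive is an illustrative helper, not API.
def is_consecutive(day_period_ids, prev_period_id, cur_period_id):
    idx = day_period_ids.index(cur_period_id)
    return idx > 0 and day_period_ids[idx - 1] == prev_period_id

assert is_consecutive(['A', 'B', 'C'], 'A', 'B')
assert not is_consecutive(['A', 'B', 'C'], 'A', 'C')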
class Participants(SimpleItem): security = ClassSecurityInfo() title = "Meeting participants" def __init__(self, id): """ """ self.id = id self.uids = PersistentList() self.administrator_uid = None def findUsers(self, search_param, search_term): """ """ def userMatched(uid, cn): if search_param == 'uid': return search_term in uid if search_param == 'cn': return search_term in cn def schema_has_param(acl_folder, param): for item in acl_folder.getLDAPSchema(): if item[0] == param: return True return False auth_tool = self.getAuthenticationTool() ret = [] for user in auth_tool.getUsers(): uid = auth_tool.getUserAccount(user) cn = auth_tool.getUserFullName(user) info = 'Local user' if userMatched(uid, cn): ret.append({'uid': uid, 'cn': cn, 'info': info}) for source in auth_tool.getSources(): acl_folder = source.getUserFolder() if schema_has_param(acl_folder, search_param): users = acl_folder.findUser(search_param=search_param, search_term=search_term) for user in users: uid = user['uid'] cn = user['cn'] info = user['dn'] ret.append({'uid': uid, 'cn': cn, 'info': info}) return ret def findUsersWithRole(self, search_role): """ """ auth_tool = self.getAuthenticationTool() ret = [] for source in auth_tool.getSources(): acl_folder = source.getUserFolder() users = source.getUsersByRole(acl_folder, [(search_role, None)]) for user in users: uid = user['uid'] if isinstance(uid, list): uid = uid[0] cn = user['cn'] if isinstance(cn, list): cn = cn[0] info = user['dn'] ret.append({'uid': uid, 'cn': cn, 'info': info}) return ret def _add_user(self, uid): if uid in self.uids: return self.aq_parent.manage_setLocalRoles(uid, [PARTICIPANT_ROLE]) self.uids.append(uid) def addUsers(self, REQUEST): """ """ uids = REQUEST.form['uids'] assert isinstance(uids, list) for uid in uids: self._add_user(uid) return REQUEST.RESPONSE.redirect(self.absolute_url()) def _remove_user(self, uid): self.aq_parent.manage_delLocalRoles([uid]) self.uids.remove(uid) def removeUsers(self, REQUEST): """ """ uids = REQUEST.form['uids'] assert isinstance(uids, list) for uid in uids: self._remove_user(uid) return REQUEST.RESPONSE.redirect(self.absolute_url()) security.declareProtected(change_permissions, 'setAdministrator') def setAdministrator(self, uid, REQUEST=None): """ """ old_admin = self.administrator_uid if uid: self.aq_parent.manage_delLocalRoles([uid]) self.aq_parent.manage_setLocalRoles(uid, ['Administrator']) self.administrator_uid = uid else: self.administrator_uid = None if old_admin: self.aq_parent.manage_delLocalRoles([old_admin]) if old_admin in self.uids: self.aq_parent.manage_setLocalRoles(old_admin, [PARTICIPANT_ROLE]) if REQUEST is not None: REQUEST.RESPONSE.redirect(self.absolute_url()) def getParticipants(self, sort_on=''): """ """ site = self.getSite() key = None if sort_on == 'o': key = lambda x: getUserOrganisation(site, x) elif sort_on == 'name': key = lambda x: getUserFullName(site, x) elif sort_on == 'email': key = lambda x: getUserEmail(site, x) elif sort_on == 'uid': key = lambda x: x if key is None: return self.uids return sorted(self.uids, key=key) def getParticipantInfo(self, uid): """ """ site = self.getSite() name = getUserFullName(site, uid) email = getUserEmail(site, uid) organisation = getUserOrganisation(site, uid) phone = getUserPhoneNumber(site, uid) return {'uid': uid, 'name': name, 'email': email, 'organisation': organisation, 'phone': phone} security.declareProtected(view, 'userCanChangePermissions') def userCanChangePermissions(self): """ """ return self.checkPermission(change_permissions) 
security.declareProtected(view, 'index_html') def index_html(self, REQUEST): """ """ return self.getFormsTool().getContent({'here': self}, 'meeting_participants') security.declareProtected(view, 'pickrole_html') def pickrole_html(self, REQUEST): """ """ return self.getFormsTool().getContent({'here': self}, 'meeting_participants_pickrole')
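# Hedged sketch of the sort-key dispatch in Participants.getParticipants above,
# with the site/user-folder lookups replaced by a plain dict so the idea runs
# on its own; `profiles` and `participants_sorted` are illustrative only.
profiles = {
    'bob':   {'name': 'Bob B.',   'email': 'bob@example.com',   'o': 'ACME'},
    'alice': {'name': 'Alice A.', 'email': 'alice@example.com', 'o': 'EEA'},
}

def participants_sorted(uids, sort_on=''):
    keys = {
        'name':  lambda uid: profiles[uid]['name'],
        'email': lambda uid: profiles[uid]['email'],
        'o':     lambda uid: profiles[uid]['o'],
        'uid':   lambda uid: uid,
    }
    key = keys.get(sort_on)
    return uids if key is None else sorted(uids, key=key)

assert participants_sorted(['bob', 'alice'], 'name') == ['alice', 'bob']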
class RecordContainer(Persistent): """ Base/default record container uses PersistentDict for entry storage and PersistentList to store ordered keys. This base container class does not advocate one place of storage for the container in a ZODB over another, so subclass implementations may choose to implement a container within a placeful (e.g. OFS or CMF Content item) or placeless (local utility) storage context. Only a placeless context is supported by direct users of this class (without subclassing). For a container with hundreds of items or more, consider using instead BTreeRecordContainer as an implementation or base class, as it should handle memory usage and insert performance much better for larger sets of records. Usage ----- RecordContainer acts as CRUD controller for working with records. The RecordContainer is an addressible object in the system, either as a registered utility (or with a subclass as "contentish" (CMF) content). Records themselves are not content, but data that are possibly non-atomic elements of an atomic content item (if the container is implemented in a subclass of RecordContainer as contentish). Usage: ------ We need a record container object: >>> from uu.record.base import Record, RecordContainer >>> container = RecordContainer() >>> from uu.record.interfaces import IRecordContainer >>> assert IRecordContainer.providedBy(container) Record containers have length and containment checks: >>> assert len(container) == 0 >>> import uuid # keys for entries are stringified UUIDs >>> randomuid = str(uuid.uuid4()) >>> assert randomuid not in container >>> assert container.get(randomuid, None) is None And they have keys/values/items methods like a mapping: >>> assert container.keys() == () >>> assert container.values() == () >>> assert container.items() == () # of course, these are empty now. Before we add records to a container, we need to create them; there are two possible ways to do this: >>> from uu.record.base import Record >>> entry1 = Record() >>> entry2 = container.create() # preferred factory Both factory mechanisms create an entry item with a record_uid attribute: >>> from uu.record.interfaces import IRecord >>> assert IRecord.providedBy(entry1) >>> assert IRecord.providedBy(entry2) >>> is_uuid = lambda u: isinstance(u, str) and len(u) == 36 >>> assert is_uuid(entry1.record_uid) >>> assert is_uuid(entry2.record_uid) And, these are RFC 4122 UUIDs, so even randomly generated 128-bit ids have near zero chance of collision: >>> assert entry1.record_uid != entry2.record_uid >>> assert entry2.record_uid != randomuid The record objects provide plone.uuid.interfaces.IAttributeUUID as an alternative way to get the UUID value (string representation) by adapting to IUUID: >>> from zope.configuration import xmlconfig >>> import plone.uuid >>> c = xmlconfig.file('configure.zcml', plone.uuid) # load registrations >>> from plone.uuid.interfaces import IUUID, IAttributeUUID >>> from zope.component import queryAdapter >>> assert IAttributeUUID.providedBy(entry1) >>> assert queryAdapter(entry1, IUUID) is not None >>> assert queryAdapter(entry1, IUUID) == entry1.record_uid Now when we have a parent context with a schema, the created entries will be signed with the schema and provide it. RecordContainer.create() is the preferred factory when processing data. This is because it can take a mapping of keys/values, and copy each field name/value onto object attributes -- if and only if the attribute in question matches a type whitelist and a name blacklist filter. 
>>> entry4 = container.create(data={'record_uid':randomuid}) >>> assert entry4.record_uid == randomuid >>> entry5 = container.create(data={'count':5}) >>> assert entry5.count == 5 >>> entry6 = container.create(data={'_bad_name' : True, ... 'count' : 2, ... 'bad_value' : lambda x: x }) >>> assert not hasattr(entry6, '_bad_name') # no leading underscores >>> assert entry6.count == 2 >>> assert not hasattr(entry6, 'bad_value') # function not copied! Of course, merely using the record container object as a factory for new records does not mean they are stored within (yet): >>> assert entry4.record_uid not in container >>> assert entry4.record_uid not in container.keys() Let's add an item: >>> container.add(entry4) There are two ways to check for containment, by either key or value: >>> assert entry4 in container >>> assert entry4.record_uid in container We can get records using a (limited, read) mapping-like interface: >>> assert len(container) == 1 # we just added the first entry >>> assert container.values()[0] is entry4 >>> assert container.get(entry4.record_uid) is entry4 >>> assert container[entry4.record_uid] is entry4 We can deal with references to entries also NOT in the container: >>> import uuid >>> randomuid = str(uuid.uuid4()) >>> assert randomuid not in container >>> assert container.get(str(uuid.uuid4()), None) is None >>> assert entry1.record_uid not in container And we can check containment on either an instance or a UID; checking on an instance is just a convenience that uses its UID (record_uid) field to check for actual containment: >>> assert entry4.record_uid in container >>> assert entry4 in container # shortcut! However, it should be noted for good measure: >>> assert entry4 in container.values() >>> assert entry4.record_uid in container.keys() >>> assert entry4 not in container.keys() # of course! 
>>> assert (entry4.record_uid, entry4) in container.items() We can modify a record contained directly; this is the most direct and low-level update interface for any entry: >>> _marker = object() >>> assert getattr(entry4, 'title', _marker) is _marker >>> entry4.title = u'Curious George' >>> assert container.get(entry4.record_uid).title == u'Curious George' We can add another record: >>> container.add(entry6) >>> assert entry6 in container >>> assert entry6.record_uid in container >>> assert len(container) == 2 Keys, values, items are always ordered; since we added entry4, then entry6 previously, they will return in that order: >>> expected_order = (entry4, entry6) >>> expected_uid_order = tuple([e.record_uid for e in expected_order]) >>> expected_items_order = tuple(zip(expected_uid_order, expected_order)) >>> assert tuple(container.keys()) == expected_uid_order >>> assert tuple(container.values()) == expected_order >>> assert tuple(container.items()) == expected_items_order We can re-order this; let's move entry6 up to position 0 (first): >>> container.reorder(entry6, offset=0) >>> expected_order = (entry6, entry4) >>> expected_uid_order = tuple([e.record_uid for e in expected_order]) >>> expected_items_order = tuple(zip(expected_uid_order, expected_order)) >>> assert tuple(container.keys()) == expected_uid_order >>> assert tuple(container.values()) == expected_order >>> assert tuple(container.items()) == expected_items_order We can also re-order by UID instead of record/entry reference: >>> container.reorder(entry6.record_uid, offset=1) # where it was before >>> expected_order = (entry4, entry6) >>> expected_uid_order = tuple([e.record_uid for e in expected_order]) >>> expected_items_order = tuple(zip(expected_uid_order, expected_order)) >>> assert tuple(container.keys()) == expected_uid_order >>> assert tuple(container.values()) == expected_order >>> assert tuple(container.items()) == expected_items_order And we can remove records from containment by UID or by reference (note, del(container[key]) uses __delitem__ since a container is a writable mapping): >>> del(container[entry6]) >>> assert entry6 not in container >>> assert entry6.record_uid not in container >>> assert len(container) == 1 >>> assert entry4 in container >>> del(container[entry4.record_uid]) >>> assert entry4 not in container >>> assert len(container) == 0 Earlier, direct update of objects was demonstrated: get an object and modify its properties. This attribute-setting mechanism is the best low-level interface, but it does not (a) support a wholesale update from either a field dictionary/mapping nor another object providing IRecord needing its data to be copied; nor (b) support notification of zope.lifecycle object events. Given these needs, a high level interface for update exists, with the record object acting as a controller for updating contained entries. This provides for update via another entry (a field-by-field copy) or from a data dictionary/mapping. >>> newuid = str(uuid.uuid4()) >>> data = { 'record_uid' : newuid, ... 'title' : u'George', ... 'count' : 9, ... } >>> assert len(container) == 0 # empty, nothing in there yet! >>> assert newuid not in container Note, update() returns an entry; return value can be ignored if caller deems it not useful. >>> entry = container.update(data) >>> assert newuid in container # update implies adding! >>> assert entry is container.get(newuid) >>> assert entry.title == data['title'] >>> assert entry.count == data['count'] Now, the entry we just modified was also added. 
We can modify it again: >>> data = { 'record_uid' : newuid, ... 'title' : u'Curious George', ... 'count' : 2, ... } >>> entry = container.update(data) >>> assert newuid in container # same uid >>> entry.title u'Curious George' >>> entry.count 2 >>> assert len(container) == 1 # same length, nothing new was added. We could also create a stand-in entry for which data is copied to the permanent entry with the same UUID on update: >>> temp_entry = container.create() >>> temp_entry.record_uid = newuid # overwrite with the uid of entry >>> temp_entry.title = u'Monkey jumping on the bed' >>> temp_entry.count = 0 temp_entry is a stand-in which we will pass to update(), when we really intend to modify entry (they have the same UID): >>> real_entry = container.update(temp_entry) >>> assert container.get(newuid) is not temp_entry >>> assert container.get(newuid) is entry # still the same object... >>> assert container.get(newuid) is real_entry >>> entry.title # ...but data is modified! u'Monkey jumping on the bed' >>> entry.count 0 >>> assert len(container) == 1 # same length, nothing new was added. JSON integration ---------------- As a convenience, update_all() parses JSON into a data dict for use by update(), using the Python 2.6 json library (aka/was: simplejson): >>> party_form = RecordContainer() >>> entry = party_form.create() >>> party_form.add(entry) >>> data = { # mock data we'll serialize to JSON ... 'record_uid': entry.record_uid, # which record to update ... 'name' : 'Me', ... 'birthday' : u'77/06/01', ... 'party_time': u'11/06/05 12:00', ... } >>> import json # requires Python >= 2.6 >>> data['name'] = 'Chunky monkey' >>> serialized = json.dumps([data,], indent=2) # JSON array of one item... >>> print serialized # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE [ { "party_time": "11/06/05 12:00", "birthday": "77/06/01", "name": "Chunky monkey", "record_uid": "..." } ] The JSON created above is useful enough for demonstration, despite being only a single-item list. >>> assert getattr(entry, 'name', _marker) is _marker # before, no attr >>> party_form.update_all(serialized) >>> entry.name # after update u'Chunky monkey' update_all() also takes a singular record, not just a JSON array: >>> data['name'] = 'Curious George' >>> serialized = json.dumps(data, indent=2) # JSON object, not array. >>> print serialized # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE { "party_time": "11/06/05 12:00", "birthday": "77/06/01", "name": "Curious George", "record_uid": "..." } >>> entry.name # before u'Chunky monkey' >>> party_form.update_all(serialized) >>> entry.name # after update u'Curious George' JSON parsing also supports a "bundle" or wrapper object around a list of entries, where the wrapper contains metadata about the form itself, not its entries (currently, this is just the process_changes field, which is sourced from the JSON bundle/wrapper object field called 'notes'). When wrapped, the list of entries is named 'entries' inside the wrapper. >>> data['name'] = u'Party monkey' >>> serialized = json.dumps({'notes' : 'something changed', ... 'entries' : [data,]}, ... indent=2) # JSON array of one item... >>> entry.name # before u'Curious George' >>> party_form.update_all(serialized) >>> entry.name # after u'Party monkey' It should be noted that update_all() removes entries not in the data payload, and it preserves the order contained in the JSON entries. 
Object events ------------- CRUD methods on a controlling object should have some means of extension, pluggable to code that should subscribe to CRUD (object lifecycle) events. We notify four distinct zope.lifecycleevent object event types: 1. Object created (zope.lifecycleevent.interfaces.IObjectCreatedEvent) 2. Object addded to container: (zope.lifecycleevent.interfaces.IObjectAddedEvent). 3. Object modified (zope.lifecycleevent.interfaces.IObjectModifiedEvent) 4. Object removed (zope.lifecycleevent.interfaces.IObjectRemovedEvent) Note: the create() operation both creates and modifies: as such, both created and modified events are fired off, and since most creations also are followed by an add() to a container, you may have three events to subscribe to early in a new entry's lifecycle. First, some necessary imports of events and the @adapter decorator: >>> from zope.component import adapter >>> from zope.lifecycleevent import IObjectCreatedEvent >>> from zope.lifecycleevent import IObjectModifiedEvent >>> from zope.lifecycleevent import IObjectRemovedEvent >>> from zope.lifecycleevent import IObjectAddedEvent Let's define dummy handlers: >>> @adapter(IRecord, IObjectCreatedEvent) ... def handle_create(context, event): ... print 'object created' ... >>> @adapter(IRecord, IObjectModifiedEvent) ... def handle_modify(context, event): ... print 'object modified' ... >>> @adapter(IRecord, IObjectRemovedEvent) ... def handle_remove(context, event): ... print 'object removed' ... >>> @adapter(IRecord, IObjectAddedEvent) ... def handle_add(context, event): ... print 'object added' ... Next, let's configure zope.event to use zope.component event subscribers; most frameworks using zope.lifecycleevent already do this, but we will configure this explicitly for documentation and testing purposes, only if not already enabled: >>> import zope.event >>> from zope.component import getGlobalSiteManager >>> gsm = getGlobalSiteManager() Importing zope.component.event puts dispatch() in zope.event.subscribers: >>> from zope.component import event >>> assert event.dispatch in zope.event.subscribers Now, let's register the handlers: >>> for h in (handle_create, handle_modify, handle_remove, handle_add): ... gsm.registerHandler(h) ... Usually, these handlers will be registered in the global site manager via ZCML and zope.configuration, but they are registered in Python above for documentation/testing purposes. We can watch these event handlers get fired when CRUD methods are called. Object creation, with and without data: >>> newentry = container.create() # should print 'object created' object created >>> another_uid = str(uuid.uuid4()) >>> newentry = container.create({'count':88}) object modified object created Object addition: >>> container.add(newentry) object added >>> Object removal: >>> del(container[newentry.record_uid]) # via __delitem__() object removed Object update (existing object): >>> entry = container.values()[0] >>> entry = container.update({'record_uid' : entry.record_uid, ... 'title' : u'Me'}) object modified Object modified (new object or not contained): >>> random_uid = str(uuid.uuid4()) >>> entry = container.update({'record_uid' : random_uid, ... 'title' : u'Bananas'}) object modified object created object added Event handlers for modification can know what fields are modified; let's create a more interesting modification handler that prints the names of changed fields. 
>>> from zope.lifecycleevent.interfaces import IAttributes >>> unregistered = gsm.unregisterHandler(handle_modify) >>> @adapter(IRecord, IObjectModifiedEvent) ... def handle_modify(context, event): ... if event.descriptions: ... attr_desc = [d for d in event.descriptions ... if (IAttributes.providedBy(d))] ... if attr_desc: ... field_names = attr_desc[0].attributes ... print tuple(field_names) >>> gsm.registerHandler(handle_modify) >>> entry = container.values()[0] >>> entry = container.update({'record_uid' : entry.record_uid, ... 'title' : u'Hello'}) ('title',) Finally, clean up and remove all the dummy handlers: >>> for h in (handle_create, handle_modify, handle_remove, handle_add): ... success = gsm.unregisterHandler(h) ... """ implements(IRecordContainer) # whitelist types of objects to copy on data update: TYPE_WHITELIST = ( int, long, str, unicode, bool, float, time.time, datetime, date, timedelta, decimal.Decimal, ) SEQUENCE_WHITELIST = ( list, tuple, set, frozenset, PersistentList, ) MAPPING_WHITELIST = ( dict, PersistentDict, ) RECORD_INTERFACE = IRecord factory = Record def __init__(self, factory=Record, _impl=PersistentDict): self._entries = _impl() self._order = PersistentList() self.factory = factory # IWriteContainer methods: def _update_size(self): self._size = len(self._order) self._p_changed = True def __setitem__(self, key, value): if isinstance(key, uuid.UUID) or isinstance(key, unicode): key = str(key) elif not (isinstance(key, str) and len(key) == 36): raise KeyError('key does not appear to be string UUID: %s', key) if not self.RECORD_INTERFACE.providedBy(value): raise ValueError('Record value must provide %s' % (self.RECORD_INTERFACE.__identifier__)) self._entries[key] = value if key not in self._order: self._order.append(key) self._update_size() def __delitem__(self, record): uid = record if self.RECORD_INTERFACE.providedBy(record): uid = str(record.record_uid) elif isinstance(record, uuid.UUID): uid = str(record) if not (isinstance(uid, str) and len(uid) == 36): raise ValueError('record neither record object nor UUID') if uid not in self._entries: raise ValueError('record not found contained within') if uid in self._order: self._order.remove(uid) self._update_size() if not self.RECORD_INTERFACE.providedBy(record): record = self._entries.get(uid) # need ref for event notify below del (self._entries[uid]) notify(ObjectRemovedEvent(record, self, uid)) # IRecordContainer and IOrdered re-ordering methods: def reorder(self, record, offset): """ Reorder a record (either UUID or object with record_uid attribute) in self._order, if record exists. If no UUID exists in self._order, raise a ValueError. Offset must be non-negative integer. 
""" uid = record offset = abs(int(offset)) if self.RECORD_INTERFACE.providedBy(record): uid = record.record_uid if not uid or uid not in self._order: raise ValueError('cannot find record to move for id %s' % uid) self._order.insert(offset, self._order.pop(self._order.index(uid))) def updateOrder(self, order): """Provides zope.container.interfaces.IOrdered.updateOrder""" if len(order) != len(self._order): raise ValueError('invalid number of keys') s_order = set(order) if len(order) != len(s_order): raise ValueError('duplicate keys in order') if s_order - set(self._order): raise ValueError('unknown key(s) provided in order') if not isinstance(order, PersistentList): order = PersistentList(order) self._order = order # IReadContainer interface methods: def get(self, uid, default=None): """ Get object providing IRecord for given UUID uid or return None """ if self.RECORD_INTERFACE.providedBy(uid): uid = uid.record_uid # special case to support __contains__() impl v = self._entries.get(str(uid), default) if v and getattr(v, '_v_parent', None) is None: v._v_parent = self # container marks item with itself as context return v def __contains__(self, record): """ Given record as either IRecord object or UUID, is record contained? """ if self.RECORD_INTERFACE.providedBy(record): return self.get(record, None) is not None return str(record) in self._entries def __len__(self): """ return length of record entries """ size = getattr(aq_base(self), '_size', None) return size if size is not None else len(self._order) def __getitem__(self, key): """Get item by UID key""" v = self.get(key, None) if v is None: raise KeyError('unknown UID for record entry') return v def keys(self): """return tuple with elements ordered""" return tuple(self._order) def values(self): """return tuple of records in order""" return tuple([t[1] for t in self.items()]) def items(self): """return ordered pairs of key/values""" return tuple([(uid, self.get(uid)) for uid in self._order]) def __iter__(self): return self._order.__iter__() # IRecordContainer-specific CRUD methods: def _type_whitelist_validation(self, value): vtype = type(value) if vtype in self.MAPPING_WHITELIST: for k, v in value.items(): if not (k in self.TYPE_WHITELIST and v in self.TYPE_WHITELIST): raise ValueError('Unsupported mapping key/value type') elif vtype in self.SEQUENCE_WHITELIST: for v in value: if v not in self.TYPE_WHITELIST: raise ValueError('Unsupported sequence value type') else: if vtype not in self.TYPE_WHITELIST: raise ValueError('Unsupported data type') def _populate_record(self, record, data): """ Given mapping of data, copy values to attributes on record. Subclasses may override to provide schema validation, selective copy of names, and normalization of values if/as necessary. """ changelog = [] for key, value in data.items(): if key.startswith('_'): continue # invalid key if key == 'record_uid': self.record_uid = str(value) continue try: self._type_whitelist_validation(value) except ValueError: continue # skip problem name! existing_value = getattr(self, key, None) if value != existing_value: changelog.append(key) setattr(record, key, value) if changelog: record._p_changed = True changelog = [ Attributes(self.RECORD_INTERFACE, name) for name in changelog ] notify(ObjectModifiedEvent(record, *changelog)) def create(self, data=None): """ Alternative factory for an IRecord object, does not store object. If data is not None, copy fields from data. 
""" if data is None: data = {} uid = data.get('record_uid', str(uuid.uuid4())) # get or random uuid record = self.factory(context=self, uid=uid) if data and (hasattr(data, 'get') and hasattr(data, 'items')): self._before_populate(record, data) self._populate_record(record, data) notify(ObjectCreatedEvent(record)) return record def add(self, record): """ Add a record to container, append UUID to end of order; over- write existing entry if already exists for a UUID (in such case leave order as-is). """ uid = str(record.record_uid) if not uid: raise ValueError('record has empty UUID') self._entries[uid] = record if uid not in self._order: self._order.append(uid) self._update_size() notify(ObjectAddedEvent(record, self, uid)) def _ad_hoc_fieldlist(self, record): attrs = [name for name in dir(record) if not name.startswith('_')] fieldnames = [] for name in attrs: v = getattr(record, name) try: self._type_whitelist_validation(v) fieldnames.append(name) except ValueError: pass # ignore name return fieldnames def _filtered_data(self, data): fieldnames = self._ad_hoc_fieldlist(data) if IRecord.providedBy(data): return dict([(k, getattr(data, k, None)) for k in fieldnames]) return dict([(k, data.get(k, None)) for k in fieldnames]) def _before_populate(self, record, data): pass # hook for subclasses def _before_update_notification(self, record, data): pass # hook for subclasses def notify_data_changed(self): notify(ObjectModifiedEvent(self, Attributes(IRecordContainer, 'items'))) def update(self, data, suppress_notify=False): """ Given data, which may be a dict of field key/values or an actual IRecord providing object, update existing entry given a UUID, or add the entry if an entry for that UUID does not yet exist. The update should copy all values for every key provided. Specialized or schema-bound subclasses of this interface may execute more elaborate rules on what data is copied and how it is normalized. Pre-condition: * All new (added) entries updated this way must contain a record_uid field with a string UUID. Post-condition: * New items should always be handled through self.create() and then self.add(). * Method returns modified record. * Should notify at least zope.lifecycleevent.IObjectModifiedEvent, (if changes, detection of which is left up to implementation). * On creation of new records, should notify both IObjectCreatedEvent and IObjectAddedEvent (the record container is the context of record). """ if self.RECORD_INTERFACE.providedBy(data): uid = data.record_uid data = self._filtered_data(data) else: uid = data.get('record_uid', None) if uid is None: raise ValueError('empty record UID on update') uid = str(uid) record = self.get(uid, None) if record is not None: # existing record, already known/saved self._before_populate(record, data) self._populate_record(record, data) # also notifies modified event else: # new, create, then add record = self.create(data) # notifies created, modified for record self.add(record) # notified added event self._before_update_notification(record, data) if (not suppress_notify) and getattr(record, '_p_changed', None): self.notify_data_changed() return record def _process_container_metadata(self, data): return False # hook for subclasses def update_all(self, data): """ Given sequence of data dictionaries or a JSON serialization thereof, update each item. Raises ValueError on missing UID of any item/entry. Also supports JSON serialization of a single record/entry dict. 
""" _modified = False if isinstance(data, basestring): _data = json.loads(data) if isinstance(_data, dict): # dict might be singluar item, or wrapping object; a wrapping # object would have a list called 'entries' if 'entries' in _data and isinstance(_data['entries'], list): _modified = self._process_container_metadata(_data) # wrapper, get entries from within. _data = _data['entries'] else: # singular record, not a wrapper _data = [_data] # wrap singular item update in list _keynorm = lambda o: dict([(str(k), v) for k, v in o.items()]) data = [_keynorm(o) for o in _data] uids = [str(o['record_uid']) for o in data] existing_uids = set(self.keys()) added_uids = set(uids) - existing_uids modified_uids = set(uids).intersection(existing_uids) for entry_data in data: if 'record_uid' not in entry_data: raise ValueError('record missing UID') record = self.update(entry_data, suppress_notify=True) if not _modified and getattr(record, '_p_changed', None): _modified = True remove_uids = existing_uids - set(uids) for deluid in remove_uids: del (self[deluid]) # remove any previous entries not in the form self._order = PersistentList(uids) # replace old with new uid order if added_uids or modified_uids: _modified = True if data and _modified: self.notify_data_changed() # notify just once
class MembraneTool(BaseTool): """Tool for managing members.""" id = TOOLNAME toolicon = 'tool.gif' meta_type = 'MembraneTool' archetype_name = 'MembraneTool' user_adder = '' case_sensitive_auth = True _catalog_count = None implements(IMembraneTool, IAttributeAnnotatable) manage_options = ( { 'label': 'Types', 'action': 'manage_membranetypes' }, { 'label': 'Status Map', 'action': 'manage_statusmap' }, ) + BaseTool.manage_options security = ClassSecurityInfo() def __init__(self, *args, **kwargs): ZCatalog.__init__(self, self.getId()) self.membrane_types = PersistentList() def attool(self): if USE_COLLECTIVE_INDEXING: return None return getToolByName(self, 'archetype_tool', None) def registerMembraneType(self, portal_type): attool = self.attool() if attool is not None: catalogs = [ x.getId() for x in attool.getCatalogsByType(portal_type) ] if TOOLNAME not in catalogs: catalogs.append(TOOLNAME) attool.setCatalogsByType(portal_type, catalogs) elif portal_type not in self.membrane_types: self.membrane_types.append(portal_type) # Trigger the status maps even if the type is already registered. notify(MembraneTypeRegisteredEvent(self, portal_type)) security.declareProtected(ManagePortal, 'registerMembraneType') def unregisterMembraneType(self, portal_type): attool = self.attool() if attool is not None: catalogs = [ x.getId() for x in attool.getCatalogsByType(portal_type) ] if TOOLNAME in catalogs: catalogs.remove(TOOLNAME) attool.setCatalogsByType(portal_type, catalogs) elif portal_type in self.membrane_types: self.membrane_types.remove(portal_type) notify(MembraneTypeUnregisteredEvent(self, portal_type)) security.declareProtected(ManagePortal, 'unregisterMembraneType') def listMembraneTypes(self): attool = self.attool() if attool is not None: mtypes = [] catalog_map = getattr(aq_base(attool), 'catalog_map', {}) for t, c in list(catalog_map.items()): if self.getId() in c: mtypes.append(t) return mtypes else: return self.membrane_types security.declareProtected(permissions.VIEW_PUBLIC_PERMISSION, 'listMembraneTypes') def getUserObject(self, login=None, user_id=None, brain=False): """ Return the authentication implementation (content item) for a given login or userid. """ query = {} if user_id: if self.case_sensitive_auth and \ ('exact_getUserId' in self._catalog.indexes): query["exact_getUserId"] = user_id else: query["getUserId"] = user_id elif login: if self.case_sensitive_auth and \ ('exact_getUserName' in self._catalog.indexes): query["exact_getUserName"] = login else: query["getUserName"] = login if not query: # No user_id or login name given return None query[ "object_implements"] = user_ifaces.IMembraneUserAuth.__identifier__ uSR = self.unrestrictedSearchResults members = uSR(**query) # filter out inadvertent ZCTextIndex matches by only keeping # records with the same number of characters if "getUserName" in query: members = [ mem for mem in members if len(mem.getUserName) == len(login) ] if "getUserId" in query: members = [ mem for mem in members if len(mem.getUserId) == len(user_id) ] if not members: return None if len(members) == 2: # Usually this is an error case, but when importing or # pasting a copy of a Plone site, the catalog can have # duplicate entries. If there are exactly 2 entries, and # one has a path that is not inside this Plone site, then # we assume this is what's happened and we clear out the # bogus entry. 
site = getToolByName(self, 'portal_url').getPortalObject() site_path = '/'.join(site.getPhysicalPath()) bogus = [ b.getPath() for b in members if site_path not in b.getPath() ] if len(bogus) == 1: # yup, clear it out and move on self._catalog.uncatalogObject(bogus[0]) members = uSR(**query) assert len(members) == 1, ('more than one member found for "%s"' % (login or user_id)) if brain: return members[0] member = members[0]._unrestrictedGetObject() return member security.declarePrivate('getUserObject') def getOriginalUserIdCase(self, userid): """ Used to get the original case spelling of a given user id. """ if userid == '': return None uSR = self.unrestrictedSearchResults query = { 'getUserId': userid, 'object_implements': user_ifaces.IMembraneUserAuth.__identifier__ } members = uSR(**query) # filter out inadvertent ZCTextIndex matches by only keeping # records with the same number of characters members = [mem for mem in members if len(mem.getUserId) == len(userid)] if not members: return None assert len(members) == 1 return members[0].getUserId
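# Hedged sketch of the exact-length post-filter applied in getUserObject and
# getOriginalUserIdCase above: a ZCTextIndex query can return partial matches,
# so results are narrowed to brains whose stored value has exactly the same
# length as the search term. Brain and exact_matches below are stand-ins for
# catalog brains, not membrane API.
def exact_matches(brains, attr, term):
    return [b for b in brains if len(getattr(b, attr)) == len(term)]

class Brain(object):
    def __init__(self, getUserName):
        self.getUserName = getUserName

brains = [Brain('bob'), Brain('bobby')]
assert [b.getUserName
        for b in exact_matches(brains, 'getUserName', 'bob')] == ['bob']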
class BodyPartGraph(Persistent): """A collection of BodyParts joined by edges.""" def __init__(self, network_args=None): """if network_args is None create empty BPG else create a random BPG.""" log.debug('BodyPartGraph.__init__') self.bodyparts = PersistentList() self.unrolled = 0 if not network_args: # this is used for unrolled bodypart copies self.root = None else: self.randomInit(network_args) def destroy(self): for bp in self.bodyparts: bp.destroy() def step(self): for bp in self.bodyparts: bp.network.step() for bp in self.bodyparts: if hasattr(bp, 'motor'): bp.motor.step() def randomInit(self, network_args): while 1: # create graph randomly del self.bodyparts[:] num_bodyparts = random.randint(2, BPG_MAX_NODES) log.debug('Creating %d random BodyParts' % (num_bodyparts)) for _ in range(num_bodyparts): bp = BodyPart(network_args) self.bodyparts.append(bp) # randomly select the root node self.root = random.choice(self.bodyparts) self.root.isRoot = 1 root_index = self.bodyparts.index(self.root) # possible n^2 connections num_connects = random.randint(1, BPG_MAX_EDGES) log.debug('creating upto %d random connections', num_connects) # Now select randomly and use to create actual connect inset = [root_index] outset = range(0, root_index) + range(root_index + 1, num_bodyparts) for _ in range(num_connects): # select from inset src_i = random.randint(0, len(inset) - 1) if not outset: break inoutset = inset + outset dst_i = random.randint(0, len(inoutset) - 1) src = self.bodyparts[inset[src_i]] bodyparts_dst_i = inoutset[dst_i] dst = self.bodyparts[bodyparts_dst_i] src.connectTo(dst) # there is no check for an existing edge, so we can get multiple edges between src and dst if not bodyparts_dst_i in inset: inset.append(bodyparts_dst_i) if bodyparts_dst_i in outset: outset.remove(bodyparts_dst_i) u = self.unroll(1) if MIN_UNROLLED_BODYPARTS <= len( u.bodyparts) <= MAX_UNROLLED_BODYPARTS: self.connectInputNodes() self.sanityCheck() break def getNeighbours(self, bp): """Calculate the set of valid neighbour bodyparts of bp A bodypart is a neighbour of bp if it is a parent or child in the bodypartgraph, or if it is bp itself.""" assert bp in self.bodyparts # find possible sources for connection in this phenotype valid_bp_neighbours = [bp] # .. all children valid_bp_neighbours += [e.child for e in bp.edges] # .. parent valid_bp_neighbours += [ p for p in self.bodyparts for e in p.edges if e.child == bp ] for neighbour in valid_bp_neighbours: assert neighbour in self.bodyparts log.debug('valid bp neighbours = %s', valid_bp_neighbours) return valid_bp_neighbours def connectInputNodes(self, sanitycheck=1): """Connect all sensory input nodes up to something. If the bpg is already unrolled, then it is a phenotype and the results won't be backannotated to the genotype input_map. If anything is left unconnected, an assert error will be thrown. If the bpg isn't already unrolled, then it will be, and any missing connections will be randomly selected and backannotated into the genotype input_map, so that later calls to unroll and connect will be able to succeed in connecting every input node up. 
""" log.debug('BodyPartGraph.connectInputNodes(self=%s)', self) if self.unrolled: log.debug('self.unrolled=1') backannotate = 0 p_bpg = self else: log.debug('self.unrolled=0') backannotate = 1 p_bpg = self.unroll() log.debug('p_bpg=%s (bodyparts=%s)' % (p_bpg, p_bpg.bodyparts)) # find all unconnected nodes un = set([(p_dst_bp, p_dst_signal) for p_dst_bp in p_bpg.bodyparts for p_dst_signal in p_dst_bp.network.inputs if not p_dst_signal.externalInputs]) # and unconnected motors un = un.union( set([(p_dst_bp, 'MOTOR_%d' % i) for p_dst_bp in p_bpg.bodyparts for i in 0, 1, 2 if not p_dst_bp.motor_input[i]])) for (p_dst_bp, p_dst_signal) in un: log.debug('UNCONNECTED bp %s signal %s', p_dst_bp, p_dst_signal) # find corresponding genotype of this node/motor g_bp = p_dst_bp.genotype if isinstance(p_dst_signal, node.Node): g_dst_signal = g_bp.network[p_dst_bp.network.index( p_dst_signal)] assert g_dst_signal in g_bp.network.inputs else: g_dst_signal = p_dst_signal # is there an entry in g_bp.input_map for the target node/motor? if not g_bp.input_map.has_key(g_dst_signal): g_bp.input_map[g_dst_signal] = PersistentList() # are there matching maps for this phenotype topology? p_neighbours = p_bpg.getNeighbours(p_dst_bp) # find all neighbour bps with valid src bp,signal for this dst in input_map matches = [(g_src_bp, g_src_signal, p_src_bp, weight) for (g_src_bp, g_src_signal, weight) in g_bp.input_map[g_dst_signal] for p_src_bp in p_neighbours if p_src_bp.genotype is g_src_bp] log.debug('input_map matches = %s', matches) p_source = None for (g_src_bp, g_src_signal, p_src_bp, weight) in matches: log.debug( 'using prestored map g_src_bp=%s g_src_signal=%s weight=%f', g_src_bp, g_src_signal, weight) # convert genotype src signal to phenotype value if type(g_src_signal) is str: p_source = (p_src_bp, g_src_signal, weight) break else: # find phenotype src node g_src_index = g_src_bp.network.index(g_src_signal) p_src_node = p_src_bp.network[g_src_index] if isinstance(p_dst_signal, node.Node) and isinstance( p_src_node, node.Node) and p_src_bp == p_dst_bp: continue # assert not two nodes in same bp network assert not (isinstance(p_dst_signal, node.Node) and isinstance(p_src_node, node.Node)) or ( p_src_bp != p_dst_bp) # don't allow an external_input if the connection # already exists internally to the network if not isinstance( p_dst_signal, node.Node ) or p_src_node not in p_dst_signal.inputs: # set source to a phenotype (bp,s) assert p_src_node in p_src_bp.network p_source = (p_src_bp, p_src_node, weight) break log.debug('rejected map - nodes already connected') if not p_source: # no entry in input_map for this node/motor # raise error if we aren't connecting up a genotype bpg assert backannotate # pick a random (bp, signal) from p_bp and backannotate into g_bp.input_map p_src_bp = random.choice(p_neighbours) # no direct connects between sensors and motors posSrcs = [] if not isinstance(p_dst_signal, str): posSrcs = ['CONTACT', 'JOINT_0', 'JOINT_1', 'JOINT_2'] # disallow connects from outnode to innode of same network if type(p_dst_signal) == str or p_src_bp != p_dst_bp: posSrcs += p_src_bp.network.outputs if isinstance(p_dst_signal, node.Node): for x in posSrcs: assert x not in p_dst_signal.inputs # remove any possible srcs that node is already connected to posSrcs = [ x for x in posSrcs if x not in p_dst_signal.inputs ] log.debug('possible connects %s <- %s', p_dst_signal, posSrcs) p_src_signal = random.choice(posSrcs) if isinstance(p_dst_signal, node.Node): assert p_src_signal not in p_dst_signal.inputs 
if isinstance(p_src_signal, node.Node): assert p_src_signal in p_src_bp.network weight = random.uniform(-7, 7) p_source = (p_src_bp, p_src_signal, weight) # find genotype of the chosen phenotype (bp,s) g_src_bp = p_src_bp.genotype weight = random.uniform(-7, 7) if isinstance(p_src_signal, node.Node): # phenotype output node -> genotype output node # (depends on offsets being the same) g_src_signal = g_src_bp.network[p_src_bp.network.index( p_src_signal)] assert g_src_signal in g_src_bp.network genosource = (g_src_bp, g_src_signal, weight) else: genosource = (g_src_bp, p_src_signal, weight) log.debug('entering %s -> %s into bp.input_map', genosource, g_dst_signal) # add to genotype.input_map our backannotated source assert (g_dst_signal, genosource) not in g_bp.input_map.items() g_bp.input_map[g_dst_signal].append(genosource) assert g_bp in [pbp.genotype for pbp in p_bpg.bodyparts] # add to signal target. if isinstance(p_dst_signal, node.Node): (p_src_bp, p_src_signal, weight) = p_source if isinstance(p_src_signal, node.Node): assert p_src_signal in p_src_bp.network p_dst_signal.addExternalInput(p_src_bp, p_src_signal, weight) elif p_dst_signal[:6] == 'MOTOR_': i = ord(p_dst_signal[6]) - ord('0') assert not p_dst_bp.motor_input[i] (sbp, ssig, weight) = p_source log.debug('p_bp.motor_input[%d]=(%s,%s)' % (i, sbp, ssig)) assert sbp in p_bpg.bodyparts p_dst_bp.motor_input[i] = p_source else: assert 0 log.debug('/connectInputNodes, calling sanityCheck') if sanitycheck: p_bpg.sanityCheck() log.debug('/BodyPartGraph.connectInputNodes') def getInputs(self, bp): """Return a list of all the external inputs to bodypart bp. Returns: [ (targetneuron, (srcbp, signal, weight), ... ]""" if self.unrolled: s0 = [(neuron, externalInput) for neuron in bp.network.inputs for externalInput in neuron.externalInputs] sources = [] for (n, e) in s0: w = None if isinstance(n, node.WeightNode): w = n.weights[e] (b, s) = e sources += [(n, (b, s, w))] if bp.joint == 'hinge': sources += [('MOTOR_2', bp.motor_input[2])] elif bp.joint == 'universal': sources += [('MOTOR_0', bp.motor_input[0]), ('MOTOR_1', bp.motor_input[1])] elif bp.joint == 'ball': sources += [('MOTOR_0', bp.motor_input[0]), ('MOTOR_1', bp.motor_input[1]), ('MOTOR_2', bp.motor_input[2])] else: sources = [(neuron, src) for neuron in bp.input_map for src in bp.input_map[neuron]] # src is (bp,sig,wei) # remove invalid motor connections if bp.joint == 'hinge': invalid = ['MOTOR_0', 'MOTOR_1'] elif bp.joint == 'universal': invalid = ['MOTOR_2'] elif bp.joint == 'ball': invalid = [] sources = [(n, s) for (n, s) in sources if n not in invalid] return sources def unroll(self, skipNetwork=0): """Returns new BPG, of possibly 0 size. The BPG will be unrolled. Each path through the network will be traced, and a new cloned body part is made for each original. The connectivity of the copy will be the same as the original, except the copy will respect upper limits on the number of instances of any given body part in a single path, and final copy instances of a part will be connected to 'final edge' children. 
No loops are left in the new BPG.""" log.debug('BodyPartGraph.unroll') # we need a count of every bp to make sure we don't loop too many times for b in self.bodyparts: b._v_instance_count = 0 for b in self.bodyparts: assert b._v_instance_count == 0 bpg = unroll_bodypart(self.root, skipNetwork) bpg.unrolled = 1 log.debug('/BodyPartGraph.unroll (bpg size %d -> size %d)', len(self.bodyparts), len(bpg.bodyparts)) return bpg def sanityCheck(self): "See if anything is wrong with this BodyPartGraph" log.debug('BodyPartGraph.sanityCheck') # check everything we can reach from the root is in our bodypart list assert self.root in self.bodyparts bps = [self.root] assert len([x for x in self.bodyparts if x.isRoot == 1]) == 1 reachable = bps while bps: bp = bps[0] if bp in reachable: # already found del bps[0] else: reachable.append(bp) assert self.bodyparts.count(bp) == 1 #assert bp._v_instance_count >= 0 for e in bp.edges: assert self.bodyparts.count(e.child) == 1 if e.child not in reachable: bps.append(e.child) # check every target child is in our bodyparts list for i in range(len(self.bodyparts)): bp = self.bodyparts[i] for e in bp.edges: assert self.bodyparts.count(e.child) == 1 # make sure that everything is connected if self.unrolled: phen_bpg = self else: phen_bpg = self.unroll() phen_bpg.connectInputNodes(sanitycheck=0) # all external inputs should have a single connection, otherwise it # should be None for bp in phen_bpg.bodyparts: for n in bp.network: if n in bp.network.inputs: assert n.externalInputs for (sbp, src) in n.externalInputs: assert sbp in phen_bpg.bodyparts if isinstance(src, node.Node): assert src in sbp.network assert src in sbp.network.outputs assert bp != sbp # no inter-network connections # check motor connections for i in 0, 1, 2: assert bp.motor_input[i] (sbp, src, weight) = bp.motor_input[i] assert sbp in phen_bpg.bodyparts if isinstance(src, node.Node): assert src in sbp.network.outputs if not self.unrolled: # assert that *genotype* has no motor_inputs for bp in self.bodyparts: for i in 0, 1, 2: assert not bp.motor_input[i] for bp in self.bodyparts: if self.unrolled: # we only use input maps for the genotype, since phenotype BPs # link back to their genotype BPs anyway assert not bp.input_map else: # Make sure all entries in input_map are valid bodyparts and neurons for (tsignal, srclist) in bp.input_map.items(): assert tsignal in bp.network.inputs or tsignal[:5] == 'MOTOR' for (sbp, ssignal, w) in srclist: assert sbp in self.bodyparts if isinstance(ssignal, node.Node): assert ssignal in sbp.network else: assert ssignal in [ 'JOINT_0', 'JOINT_1', 'JOINT_2', 'CONTACT' ] assert isinstance(w, float) # check src and dst nodes for externalInputs are in respective bp.network for bp in phen_bpg.bodyparts: sources = phen_bpg.getInputs(bp) for (tsignal, (sbp, signal, w)) in sources: sbp_i = phen_bpg.bodyparts.index(sbp) tbp_i = phen_bpg.bodyparts.index(bp) if isinstance(tsignal, node.Node): bp.network.index(tsignal) if isinstance(signal, node.Node): assert signal in sbp.network def fixup(self): """Fix any problems with this BodyPartGraph (ie. invalid connections, bad root, etc.) This is called on rolled bpgs after mutation, and unrolled after modification to fit simulation constraints (eg. MAX_UNROLLED_BODYPARTS). 
""" # remove edges that point to invalid children for bp in self.bodyparts: edges_to_remove = [] for e in bp.edges: if e.child not in self.bodyparts: #bp.edges.remove(e) edges_to_remove.append(e) for e in edges_to_remove: bp.edges.remove(e) # make sure root exists if self.root not in self.bodyparts or len( [x for x in self.bodyparts if x.isRoot == 1]) != 1: # randomly select the root node for b in self.bodyparts: b.isRoot = 0 self.root = random.choice(self.bodyparts) self.root.isRoot = 1 assert len([x for x in self.bodyparts if x.isRoot == 1]) == 1 # remove input_map entries that are invalid for bp in self.bodyparts: if bp.input_map: # we need to keep a list and erase at the end otherwise we fall into # the trap of removing items for a mutable list whilst iterating # over it for (tneuron, srclist) in bp.input_map.items(): if tneuron not in bp.network.inputs: del bp.input_map[tneuron] else: for (sbp, sneuron, w) in srclist[:]: if sbp not in self.bodyparts or sneuron not in sbp.network: srclist.remove((sbp, sneuron, w)) for bp in self.bodyparts: if bp.input_map: for (tneuron, srclist) in bp.input_map.items(): for (sbp, sneuron, w) in srclist: assert sbp in self.bodyparts # check whether input_map entries are still valid for bp in self.bodyparts: if bp.input_map: krm = [] for k in bp.input_map.keys(): if k not in self.bodyparts: krm.append(k) else: # key is valid toremove = [] for (sbp, sig, w) in bp.input_map[k]: # check sbp is ok and src is a string or output node if sbp not in self.bodyparts or ( isinstance(sig, node.Node) and sig not in sbp.network.outputs): toremove.append((sbp, sig, w)) for x in toremove: bp.input_map[k].remove(x) for k in krm: del bp.input_map[k] self.connectInputNodes() self.sanityCheck() def mutate_delete_edges(self, p): "Randomly erase edges in this BodyPartGraph with probability p" for bp in self.bodyparts: for i in range(len(bp.edges) - 1, -1, -1): if random.random() < p: # delete edge log.debug('delete edge') self.mutations += 1 del bp.edges[i] self.fixup() self.sanityCheck() def mutate_add_edges(self, p): "Randomly add edges in this BodyPartGraph with probability p" for s_bp in self.bodyparts: if random.random() < p: #and len(self.bodyparts) < BPG_MAX_EDGES: # add edge log.debug('add edge') self.mutations += 1 t_bp = random.choice(self.bodyparts) e = Edge(t_bp, random.choice([-1, 1]), random.choice([0, 1])) s_bp.edges.append(e) # we now have new nodes in the unrolled bpg which don't have # entries in their genotype bp for their neighbours, so fixup self.fixup() self.sanityCheck() def mutate_delete_nodes(self, p): "Randomly delete nodes in this BodyPartGraph with probability p" for i in range(len(self.bodyparts) - 1, -1, -1): if random.random() < p and len(self.bodyparts) > 1: # delete node log.debug('delete node') self.mutations += 1 bp_del = self.bodyparts[i] # delete all edges pointing to this node for bp in self.bodyparts: edges_to_remove = [] for e in bp.edges: if e.child == bp_del: edges_to_remove.append(e) for e in edges_to_remove: bp.edges.remove(e) self.bodyparts.remove(bp_del) if bp_del == self.root: self.root = random.choice(self.bodyparts) self.root.isRoot = 1 self.fixup() self.sanityCheck() def mutate_copy_nodes(self, p): "Randomly copy nodes in this BodyPartGraph with probability p" for i in range(len(self.bodyparts)): if random.random() < p and len(self.bodyparts) < BPG_MAX_NODES: # copy and mutate node log.debug('copy node') self.mutations += 1 c = copy.deepcopy(self.bodyparts[i]) # we did in fact just copy everything the bp links to ... 
# fixme: correct? yes? efficient? probably not. c.edges = PersistentList() c.mutate(p) self.bodyparts.append(c) # random incoming edges i = random.randint(1, len(self.bodyparts) / 2) for _ in range(i): # add edges e = Edge(c, random.choice([-1, 1]), random.choice([0, 1])) s_bp = random.choice(self.bodyparts) s_bp.edges.append(e) # random outgoing edges i = random.randint(1, len(self.bodyparts) / 2) for _ in range(i): # add edges t_bp = random.choice(self.bodyparts) e = Edge(t_bp, random.choice([-1, 1]), random.choice([0, 1])) c.edges.append(e) self.fixup() self.sanityCheck() def mutate_inputmaps(self, p): "Randomly rewire input_maps in each BodyPart with probability p" for bp in self.bodyparts: for _ in range(len(bp.input_map)): if random.random() < p: log.debug('mutate input_map') self.mutations += 1 di = random.choice(bp.input_map.keys()) if random.random() < 0.5: del bp.input_map[di] else: # mutate weight xp = random.randrange(0, len(bp.input_map[di])) x = list(bp.input_map[di][xp]) x[2] = rnd(-7, 7, x[2]) bp.input_map[di][xp] = tuple(x) self.connectInputNodes() self.sanityCheck() def mutate(self, p): "Mutate the BodyPartGraph nodes, edges, and all parameters." log.debug('bpg.mutate(p=%f)', p) self.sanityCheck() self.mutations = 0 self.mutate_delete_edges(p) self.mutate_add_edges(p) self.mutate_delete_nodes(p) self.mutate_copy_nodes(p) self.mutate_inputmaps(p) # FIXME: mutate number of input and output nodes # mutate motors and sensors? self.sanityCheck() for bp in self.bodyparts: # mutate individual parameters self.mutations += bp.mutate(p) # since bp mutate can change the topology of the unrolled graph via # recursive_limit, we need to fix up external_input and maybe others self.fixup() self.sanityCheck() self.sanityCheck() log.debug('/bpg.mutate') return self.mutations
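# The mutate_* methods above delete from PersistentList instances while
# iterating.  A minimal standalone sketch of the reverse-index deletion
# pattern used by mutate_delete_edges (the probability value and the toy
# edge names here are illustrative, not taken from the original code):
import random

from persistent.list import PersistentList

def delete_with_probability(items, p):
    # Walk indices from the end so deletions do not shift the positions
    # of elements that have not been visited yet.
    for i in range(len(items) - 1, -1, -1):
        if random.random() < p:
            del items[i]

edges = PersistentList(['e%d' % i for i in range(10)])
delete_with_probability(edges, 0.3)
# The surviving edges keep their original relative order.
print(list(edges))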
class OrderedContainer(Persistent, Contained): """ `OrderedContainer` maintains entries' order as added and moved. >>> oc = OrderedContainer() >>> int(IOrderedContainer.providedBy(oc)) 1 >>> len(oc) 0 """ implements(IOrderedContainer) def __init__(self): self._data = PersistentDict() self._order = PersistentList() def keys(self): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc.keys() [] >>> oc['foo'] = 'bar' >>> oc.keys() ['foo'] >>> oc['baz'] = 'quux' >>> oc.keys() ['foo', 'baz'] >>> int(len(oc._order) == len(oc._data)) 1 """ return self._order[:] def __iter__(self): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc.keys() [] >>> oc['foo'] = 'bar' >>> oc['baz'] = 'quux' >>> [i for i in oc] ['foo', 'baz'] >>> int(len(oc._order) == len(oc._data)) 1 """ return iter(self.keys()) def __getitem__(self, key): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc['foo'] = 'bar' >>> oc['foo'] 'bar' """ return self._data[key] def get(self, key, default=None): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc['foo'] = 'bar' >>> oc.get('foo') 'bar' >>> oc.get('funky', 'No chance, dude.') 'No chance, dude.' """ return self._data.get(key, default) def values(self): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc.keys() [] >>> oc['foo'] = 'bar' >>> oc.values() ['bar'] >>> oc['baz'] = 'quux' >>> oc.values() ['bar', 'quux'] >>> int(len(oc._order) == len(oc._data)) 1 """ return [self._data[i] for i in self._order] def __len__(self): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> int(len(oc) == 0) 1 >>> oc['foo'] = 'bar' >>> int(len(oc) == 1) 1 """ return len(self._data) def items(self): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc.keys() [] >>> oc['foo'] = 'bar' >>> oc.items() [('foo', 'bar')] >>> oc['baz'] = 'quux' >>> oc.items() [('foo', 'bar'), ('baz', 'quux')] >>> int(len(oc._order) == len(oc._data)) 1 """ return [(i, self._data[i]) for i in self._order] def __contains__(self, key): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc['foo'] = 'bar' >>> int('foo' in oc) 1 >>> int('quux' in oc) 0 """ return self._data.has_key(key) has_key = __contains__ def __setitem__(self, key, object): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc.keys() [] >>> oc['foo'] = 'bar' >>> oc._order ['foo'] >>> oc['baz'] = 'quux' >>> oc._order ['foo', 'baz'] >>> int(len(oc._order) == len(oc._data)) 1 >>> oc['foo'] = 'baz' Traceback (most recent call last): ... KeyError: u'foo' >>> oc._order ['foo', 'baz'] """ existed = self._data.has_key(key) bad = False if isinstance(key, StringTypes): try: unicode(key) except UnicodeError: bad = True else: bad = True if bad: raise TypeError("'%s' is invalid, the key must be an " "ascii or unicode string" % key) if len(key) == 0: raise ValueError("The key cannot be an empty string") # We have to first update the order, so that the item is available, # otherwise most API functions will lie about their available values # when an event subscriber tries to do something with the container. if not existed: self._order.append(key) # This function creates a lot of events that other code listens to. try: setitem(self, self._data.__setitem__, key, object) except Exception, e: if not existed: self._order.remove(key) raise e return key
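# OrderedContainer pairs a PersistentDict of values with a PersistentList
# recording key order.  A minimal sketch of that pairing, exercised on the
# persistent primitives directly rather than on the class above (names are
# illustrative only):
from persistent.mapping import PersistentMapping
from persistent.list import PersistentList

data = PersistentMapping()
order = PersistentList()

def ordered_set(key, value):
    if key not in data:          # only new keys extend the order
        order.append(key)
    data[key] = value

ordered_set('foo', 'bar')
ordered_set('baz', 'quux')
assert list(order) == ['foo', 'baz']
assert len(order) == len(data)   # the invariant the doctests keep checking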
class Game(Persistent): """ A Game aggregates the players and matches that are part of a competition. For example, a Game could be 'Football' or 'Hockey' """ def __init__(self, name): self.name = name # Player name -> Player self.players = PersistentMapping() # List of all matches for this game self.matches = PersistentList() # Whether to use average instead of sum-of-skill for this game self.use_average_team_skill = True def delete_match(self, match): if not match in self.matches: return self.matches.remove(match) players = match.teams[0] + match.teams[1] for p in players: if match in p.matches: p.matches.remove(match) self.recalculate_ratings() for p in list(self.players.keys()): if not self.players[p].matches: self.players.pop(p) def add_match(self, teams, score): players_a = [self.get_player(name) for name in teams[0]] players_b = [self.get_player(name) for name in teams[1]] # Add Match to the Database match = Match(self, [players_a, players_b], score) self.matches.append(match) self.update_player_ratings(match) match.update_rating_delta() def update_player_ratings(self, match): ratings_a = [p.get_rating() for p in match.teams[0]] ratings_b = [p.get_rating() for p in match.teams[1]] # Sort by score and get rank indices rank = list(zip(match.score, range(len(match.score)))) rank.sort(key=lambda r: r[0], reverse=True) rank_indices = list(zip(*rank))[1] # Check for Draw # TODO: make this generic for more than 2 teams if match.score[0] == match.score[1]: rank_indices = [0, 0] # Calculate new Ratings using trueskill algorithm new_ratings = trueskill.rate([ratings_a, ratings_b], ranks=rank_indices, weights=self.team_weights( ratings_a, ratings_b)) for r, p in zip(new_ratings[0], match.teams[0]): p.set_rating(r) p.add_match(match) for r, p in zip(new_ratings[1], match.teams[1]): p.set_rating(r) p.add_match(match) def recalculate_ratings(self): for player in self.players.values(): player.reset_rating() player.matches.clear() player.history.clear() for match in self.matches: match.init_stats() self.update_player_ratings(match) match.update_rating_delta() def get_player(self, name): if not name in self.players: self.players[name] = Player(name) return self.players[name] # Calcualte player weights for a match based on whether average or additive ratings # are used for this game def team_weights(self, team1, team2): ratings = [team1, team2] weights = [[1 for _ in r] for r in ratings] if self.use_average_team_skill: # Adjust weights by team-size. This effectively causes the TrueSkill algorithm to # look at the average instead of the sum of skills min_team_size = min(map(len, ratings)) weights = [[min_team_size / float(len(r)) for _ in r] for r in ratings] return weights def win_probability(self, team1, team2): """" Calculate the win probability of team1 over team2 given the skill ratings of all the players in the teams. """ def skill_sum(team, weights): return sum([v.skill() * w for (v, w) in zip(team, weights)]) weights = self.team_weights(team1, team2) delta_mu = skill_sum(team1, weights[0]) - skill_sum(team2, weights[1]) sum_sigma = sum((r.confidence() * w)**2 for ( r, w) in zip(itertools.chain(team1, team2), itertools.chain( *weights))) size = len(team1) + len(team2) denom = math.sqrt(size * (trueskill.BETA * trueskill.BETA) + sum_sigma) ts = trueskill.global_env() return ts.cdf(delta_mu / denom) def draw_probability(self, team1, team2): r1 = [p.get_rating() for p in team1] r2 = [p.get_rating() for p in team2] return trueskill.quality([r1, r2], weights=self.team_weights(team1, team2))
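# Game.team_weights scales every player's weight by min_team_size / len(team)
# when use_average_team_skill is enabled, so the rating update behaves like a
# comparison of average rather than summed skill.  A standalone sketch of just
# that computation (the team sizes below are made up for illustration):
def average_skill_weights(team_sizes):
    smallest = min(team_sizes)
    return [[smallest / float(size)] * size for size in team_sizes]

# A 2-player team against a 3-player team: each player on the larger team is
# down-weighted to 2/3 so both teams contribute comparable total mass.
assert average_skill_weights([2, 3]) == [[1.0, 1.0],
                                         [2.0 / 3, 2.0 / 3, 2.0 / 3]]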
class PersistentOrderedContainer(PersistentContainer): def __init__(self, name, parent=None): PersistentContainer.__init__(self, name, parent) self.__children = PersistentList() def index(self, item): return self.__children.index(item) def get_children(self, index): return self.__children[index] @property def children(self): return list(self.__children) @children.setter def children(self, children): if len(children) != len(self): raise ValueError('len(children) and len(self) must be equal') for child in children: if not child.name in self: raise ValueError('children and self must ' \ 'contain the same objects') self.__children = PersistentList(children) def add(self, item): if item.__name__ not in self: self.__children.append(item) else: raise ValueError('The container already contains this item') PersistentContainer.add(self, item) append = add def insert(self, index, item): if item.__name__ not in self: self.__children.insert(index, item) else: raise ValueError('The container already contains this item') PersistentContainer.append(self, item) def __setitem__(self, name, item): already_in_children = name in self PersistentContainer.__setitem__(self, name, item) if not already_in_children: self.__children.append(item) def __delitem__(self, name): if name in self: self.__children.remove(self[name]) PersistentContainer.__delitem__(self, name) def __iter__(self): return self.iterkeys() def keys(self): return [child.name for child in self.__children] def values(self): return [child for child in self.__children] def items(self): return [(child.name, child) for child in self.__children] def iterkeys(self): for child in self.__children: yield child.name def itervalues(self): for child in self.__children: yield child def iteritems(self): for child in self.__children: yield child.name, child
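# The children setter above reorders by swapping in a brand new PersistentList,
# but only after validating that the new sequence holds exactly the same items.
# A minimal sketch of that reorder-by-replacement idea on a bare PersistentList
# (contents are illustrative):
from persistent.list import PersistentList

children = PersistentList(['a', 'b', 'c'])

def reorder(current, new_order):
    if sorted(new_order) != sorted(current):
        raise ValueError('new order must contain exactly the same items')
    return PersistentList(new_order)

children = reorder(children, ['c', 'a', 'b'])
assert list(children) == ['c', 'a', 'b']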
class SchemaManager(Persistent): """ Persistent schema manager, persists a list of dotted interface names, and resolves values at runtime dynamically with zope.dottedname. """ implements(ISchemaManager) def __init__(self): self._names = PersistentList() # dotted names ## mapping interface, with lazy resolution of schema ## interfaces by zope.dottedname import/resolution def get(self, name, default=None): name = str(name) if name not in self._names: return default v = _resolve(name) return v def __getitem__(self, name): v = self.get(name, None) if v is None: raise KeyError(name) return v def __contains__(self, name): if IInterface.providedBy(name): name = identify_interface(name) name = str(name) return name in self._names def keys(self): return list(self._names) def values(self): return list(self.itervalues()) def items(self): return list(self.iteritems()) def iterkeys(self): return self._names def itervalues(self): return itertools.imap(lambda k: self.get(k), self.iterkeys()) def iteritems(self): return itertools.imap(lambda k: (k, self.get(k)), self.iterkeys()) def __len__(self): return len(self._names) __iter__ = iterkeys ## mutable interface bind and forget: def bind(self, schema): if not IInterface.providedBy(schema): raise TypeError('Cannot bind non-interface object %s' % schema) name = identify_interface(schema) if name in self._names: raise KeyError( 'duplicate schema: Interface %s already managed.' % (name,)) self._names.append(name) def forget(self, schema): name = str(schema) if IInterface.providedBy(schema): name = identify_interface(schema) if name not in self._names: return self._names.remove(name) def orphans(self): return tuple(k for k, v in self.iteritems() if v is None)
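# SchemaManager persists only dotted interface names and resolves them lazily;
# the _resolve helper above presumably wraps zope.dottedname.  A sketch of that
# store-the-name, resolve-on-read pattern under that assumption:
from persistent.list import PersistentList
from zope.dottedname.resolve import resolve

names = PersistentList()
names.append('persistent.list.PersistentList')   # store the name, not the class

def lazy_get(name, default=None):
    if name not in names:
        return default
    try:
        return resolve(name)
    except (ImportError, AttributeError):
        return default

assert lazy_get('persistent.list.PersistentList') is PersistentList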
class ZopeStore(OpenIDStore): """Zope OpenID store. This class implements an OpenID store which uses the ZODB. """ def __init__(self): self.associations = OOBTree() self.handles = OOBTree() self.nonces = OITreeSet() self.noncetimeline = PersistentList() self.assoctimeline = PersistentList() def getAssociationKey(self, server_url, handle): """Generate a key used to identify an association in our storage. """ if handle is None: return self.handles[server_url][0] return (server_url, handle) def storeAssociation(self, server_url, association): key = self.getAssociationKey(server_url, association.handle) self.associations[key] = association.serialize() now = time.time() def getKey(item): return self.getAssociation(item[0], item[1], remove=False).getExpiresIn(now) lst = self.handles.get(server_url, []) lst.append(key) lst.sort(key=getKey) self.handles[server_url] = lst if not hasattr(self, "assoctimeline"): # BBB for versions < 1.0b2 self.assoctimeline = PersistentList() self.assoctimeline.append( (association.issued + association.lifetime, key)) def getAssociation(self, server_url, handle=None, remove=True): try: key = self.getAssociationKey(server_url, handle) assoc = Association.deserialize(self.associations[key]) except KeyError: return None if remove and assoc.getExpiresIn() == 0: self.removeAssociation(server_url, handle) return None return assoc def removeAssociation(self, server_url, handle): key = self.getAssociationKey(server_url, handle) try: assoc = Association.deserialize(self.associations[key]) del self.associations[key] lst = self.handles[server_url] lst.remove(key) self.handles[server_url] = lst self.assoctimeline.remove((assoc.issued + assoc.lifetime, key)) return True except KeyError: return False def useNonce(self, server_url, timestamp, salt): nonce = (salt, server_url) if nonce in self.nonces: return False self.nonces.insert(nonce) if not hasattr(self, "noncetimeline"): # BBB for store instances from before 1.0b2 self.noncetimeline = PersistentList() self.noncetimeline.append((timestamp, nonce)) return True def cleanupNonces(self): if not hasattr(self, "noncetimeline"): return 0 cutoff = time.time() + SKEW count = 0 for (timestamp, nonce) in self.noncetimeline: if timestamp < cutoff: self.noncetimeline.remove((timestamp, nonce)) self.nonces.remove(nonce) count += 1 return count def cleanupAssociations(self): if not hasattr(self, "assoctimeline"): return 0 now = time.time() count = 0 expired = (key for (timestamp, key) in self.assoctimeline if timestamp <= now) for key in expired: self.removeAssociation(*key) count += 1 return count
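# ZopeStore adds noncetimeline/assoctimeline lazily behind hasattr checks so
# instances pickled by older releases grow the new PersistentList on first use.
# A standalone sketch of that backward-compatibility pattern (the class and
# attribute names here are illustrative):
from persistent import Persistent
from persistent.list import PersistentList

class Store(Persistent):
    def __init__(self):
        self.timeline = PersistentList()

    def record(self, event):
        if not hasattr(self, 'timeline'):
            # Instance created before 'timeline' existed: add it now.
            self.timeline = PersistentList()
        self.timeline.append(event)

old = Store()
del old.timeline          # simulate an instance from an older schema
old.record('first event')
assert list(old.timeline) == ['first event']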
class Job(persistent.Persistent): """ class representing several scripts """ def __init__(self): self.id = None self.user_id = None self.name = None self.status = None self.settings = PersistentMapping() self.scripts = PersistentList() def get_scripts(self): """ :return: list of Script objects """ return [db.scripts.get(script_id, None) for script_id in self.scripts] def add_script(self, script): """ adds Script to internal list of scripts :param script: Script :return: None """ self.scripts.append(script.id) def delete_script(self, script): """ :param script: script to be removed :return: None """ self.scripts.remove(script.id) def get_user(self): """ :return: User object, creator of this job """ return db.users.get(self.user_id) def get_script_at(self, position=-1): """ :param position: :return: Script or None :rtype Script: """ try: return db.scripts.get(self.scripts[position]) except: return None def script(self): """ :return: current Script (last one) :rtype Script: """ return self.get_script_at(-1) def get_result_cls(self): """ :return: css class for this instance result :rtype str: """ return self.script().get_result_cls() if self.scripts else 'default' def get_result_str(self): """ :return: str representation for this instance result :rtype str: """ return self.script().get_result_str() if self.scripts else 'No results' def __repr__(self): return u"<Job '{self.id}' '{self.name}', scripts=[{scripts}]>".format( self=self, scripts=self.get_scripts()) def __str__(self): return self.__repr__() def __unicode__(self): return self.__repr__() def as_dict(self, peek=False): """ :param peek: whether to load script from db :return: dict representation of this object :rtype: dict """ d = dict(id=self.id, name=self.name, status=self.status, settings=dict(self.settings), user=self.get_user().as_dict(), scripts_id=[script_id for script_id in self.scripts]) if not peek: d['scripts'] = [script.as_dict() for script in self.get_scripts()] return d remove_script = delete_script
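# Job keeps only script ids in its PersistentList and resolves them through
# db.scripts at read time.  A sketch of that id indirection, with a plain dict
# standing in for the hypothetical db.scripts mapping:
from persistent.list import PersistentList

scripts_by_id = {1: 'script one', 2: 'script two'}   # stand-in for db.scripts
script_ids = PersistentList()

def add_script(script_id):
    script_ids.append(script_id)

def get_scripts():
    return [scripts_by_id.get(sid) for sid in script_ids]

add_script(1)
add_script(2)
assert get_scripts() == ['script one', 'script two']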
class RoomBlocking(Persistent, RoomBlockingBase): __dalManager = Factory.getDALManager() def __init__(self): RoomBlockingBase.__init__(self) self.blockedRooms = PersistentList() self.allowed = PersistentList() @staticmethod def getRoot(): return RoomBlocking.__dalManager.getRoot(_ROOMBLOCKING) @staticmethod def getTotalCount(): return len(RoomBlocking.getRoot()['Blockings']) @staticmethod def getAll(): return [block for block in RoomBlocking.getRoot()['Blockings'].itervalues()] @staticmethod def getById(id): blockingsBTree = RoomBlocking.getRoot()['Blockings'] return blockingsBTree.get(id) @staticmethod def getByOwner(owner): idx = RoomBlocking.getRoot()['Indexes']['OwnerBlockings'] return idx.get(owner.id, []) @staticmethod def getByRoom(room, active=-1): idx = RoomBlocking.getRoot()['Indexes']['RoomBlockings'] blocks = idx.get(str(room.guid), []) return [block for block in blocks if active is block.active or active == -1] @staticmethod def getByDate(date): idx = RoomBlocking.getRoot()['Indexes']['DayBlockings'] return list(idx.getObjectsInDay(date)) @staticmethod def getByDateSpan(begin, end): idx = RoomBlocking.getRoot()['Indexes']['DayBlockings'] return list(idx.getObjectsInDays(begin, end)) def addAllowed(self, principal): """ Add a principal (Avatar, Group, CERNGroup or a RoomBlockingPrincipal) to the blocking's ACL """ if isinstance(principal, RoomBlockingPrincipal): self.allowed.append(principal) else: self.allowed.append(RoomBlockingPrincipal(principal)) def delAllowed(self, principal): """ Remove a principal (Avatar, Group, CERNGroup or a RoomBlockingPrincipal) from the blocking's ACL """ if isinstance(principal, RoomBlockingPrincipal): self.allowed.remove(principal) else: self.allowed.remove(RoomBlockingPrincipal(principal)) def getBlockedRoom(self, room): """ Get the BlockedRoom object for a certain room """ for br in self.blockedRooms: if br.roomGUID == str(room.guid): return br return None def notifyOwners(self): """ Send emails to all room owners who need to approve blockings. Every owner gets only a single email, containing all the affected rooms. """ notify_owners = defaultdict(list) for rb in self.blockedRooms: if rb.active is None and not rb.notificationSent: notify_owners[rb.room.responsibleId].append(rb) rb.notificationSent = True emails = [] for ownerId, roomBlockings in notify_owners.iteritems(): emails += RoomBlockingNotification.requestConfirmation(AvatarHolder().getById(ownerId), self, roomBlockings) for email in emails: GenericMailer.send(GenericNotification(email)) def insert(self): """ Insert a new blocking in the database, index it and reject colliding bookings """ self.createdDT = datetime.datetime.now() # Save blockingsBTree = RoomBlocking.getRoot()['Blockings'] # Ensure ID if self.id is None: # Maximum ID + 1 if len(blockingsBTree) > 0: self.id = blockingsBTree.maxKey() + 1 else: self.id = 1 # Can not use maxKey for 1st record in a tree # Add self to the BTree blockingsBTree[self.id] = self self._index() # Reject colliding bookings. 
for rb in self.blockedRooms: if rb.active: rb.approve(sendNotification=False) self.notifyOwners() def remove(self): """ Remove a blocking from the database """ self._unindex() blockingsBTree = RoomBlocking.getRoot()['Blockings'] del blockingsBTree[self.id] def update(self): """ Re-index a blocking and notify owners which haven't been notified before """ self._unindex() self._index() self.notifyOwners() def _index(self): # Update room => room blocking index (it maps to the BlockedRoom objects) rbi = RoomBlocking.getRoot()['Indexes']['RoomBlockings'] for rb in self.blockedRooms: roomBlockings = rbi.get(rb.roomGUID) if roomBlockings is None: roomBlockings = PersistentList() rbi[rb.roomGUID] = roomBlockings roomBlockings.append(rb) # Update owner => room blocking index obi = RoomBlocking.getRoot()['Indexes']['OwnerBlockings'] roomBlockings = obi.get(self._createdBy) if roomBlockings is None: roomBlockings = PersistentList() obi[self._createdBy] = roomBlockings roomBlockings.append(self) # Update day => room blocking index cdbi = RoomBlocking.getRoot()['Indexes']['DayBlockings'] cdbi.indexConf(self) def _unindex(self): # Update room => room blocking index rbi = RoomBlocking.getRoot()['Indexes']['RoomBlockings'] for rb in self.blockedRooms: roomBlockings = rbi.get(rb.roomGUID) if roomBlockings is not None and rb in roomBlockings: roomBlockings.remove(rb) # Update owner => room blocking index obi = RoomBlocking.getRoot()['Indexes']['OwnerBlockings'] roomBlockings = obi.get(self._createdBy) if roomBlockings is not None and self in roomBlockings: roomBlockings.remove(self) # Update day => room blocking index cdbi = RoomBlocking.getRoot()['Indexes']['DayBlockings'] cdbi.unindexConf(self) def __repr__(self): return '<RoomBlocking(%r, %r, %s)>' % (self.id, self.blockedRooms, self.allowed)
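# RoomBlocking._index appends to a per-key PersistentList stored inside a
# BTree, creating the list on first use.  A minimal sketch of that
# get-or-create indexing step (keys and values are illustrative):
from BTrees.OOBTree import OOBTree
from persistent.list import PersistentList

index = OOBTree()

def index_value(key, value):
    bucket = index.get(key)
    if bucket is None:
        bucket = PersistentList()
        index[key] = bucket      # store the new list under the key
    bucket.append(value)

index_value('room-1', 'blocking-a')
index_value('room-1', 'blocking-b')
assert list(index['room-1']) == ['blocking-a', 'blocking-b']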
class ToManyRelationship(ToManyRelationshipBase): """ ToManyRelationship manages the ToMany side of a bi-directional relation between to objects. It does not return values for any of the object* calls defined on ObjectManager so that Zope can still work with its containment assumptions. It provides object*All calles that return its object in the same way that ObjectManager does. Related references are maintained in a list. """ __pychecker__='no-override' meta_type = "ToManyRelationship" security = ClassSecurityInfo() def __init__(self, id): """ToManyRelationships use an array to store related objects""" self.id = id self._objects = PersistentList() def __call__(self): """when we are called return our related object in our aq context""" return self.objectValuesAll() def hasobject(self, obj): "check to see if we have this object" try: idx = self._objects.index(obj) return self._objects[idx] except ValueError: return None def manage_pasteObjects(self, cb_copy_data=None, REQUEST=None): """ToManyRelationships link instead of pasting""" return self.manage_linkObjects(cb_copy_data=cb_copy_data, REQUEST=REQUEST) def _add(self,obj): """add an object to one side of this toMany relationship""" if obj in self._objects: raise RelationshipExistsError self._objects.append(aq_base(obj)) self.__primary_parent__._p_changed = True def _remove(self, obj=None, suppress_events=False): """remove object from our side of a relationship""" if obj: try: self._objects.remove(obj) except ValueError: raise ObjectNotFound( "object %s not found on relation %s" % ( obj.getPrimaryId(), self.getPrimaryId())) else: self._objects = PersistentList() self.__primary_parent__._p_changed = True def _remoteRemove(self, obj=None): """remove an object from the far side of this relationship if no object is passed in remove all objects""" if obj: if obj not in self._objects: raise ObjectNotFound("object %s not found on relation %s" % ( obj.getPrimaryId(), self.getPrimaryId())) objs = [obj] else: objs = self.objectValuesAll() remoteName = self.remoteName() for obj in objs: rel = getattr(obj, remoteName) try: rel._remove(self.__primary_parent__) except ObjectNotFound: message = log_tb(sys.exc_info()) log.error('Remote remove failed. Run "zenchkrels -r -x1". ' + message) def _setObject(self,id,object,roles=None,user=None,set_owner=1): """Set and object onto a ToMany by calling addRelation""" unused(id, roles, user, set_owner) self.addRelation(object) def _delObject(self, id, dp=1, suppress_events=False): """ Delete object by its absolute id (ie /zport/dmd/bla/bla) (this is sent out in the object*All API) """ obj = getObjByPath(self, id) self.removeRelation(obj, suppress_events=suppress_events) def _getOb(self, id, default=zenmarker): """ Return object based on its primaryId. plain id will not work!!! """ objs = filter(lambda x: x.getPrimaryId() == id, self._objects) if len(objs) == 1: return objs[0].__of__(self) if default != zenmarker: return default raise AttributeError(id) def objectIdsAll(self): """ Return object ids as their absolute primaryId. """ return [obj.getPrimaryId() for obj in self._objects] def objectIds(self, spec=None): """ ToManyRelationship doesn't publish objectIds to prevent zope recursion problems. 
""" unused(spec) return [] security.declareProtected('View', 'objectValuesAll') def objectValuesAll(self): """return all related object values""" return list(self.objectValuesGen()) def objectValuesGen(self): """Generator that returns all related objects.""" return (obj.__of__(self) for obj in self._objects) def objectValues(self, spec=None): """ ToManyRelationship doesn't publish objectValues to prevent zope recursion problems. """ unused(spec) return [] def objectItemsAll(self): """ Return object items where key is primaryId. """ return [(obj.getPrimaryId(), obj) for obj in self._objects] def objectItems(self, spec=None): """ ToManyRelationship doesn't publish objectItems to prevent zope recursion problems. """ unused(spec) return [] def _getCopy(self, container): """ create copy and link remote objects if remote side is TO_MANY """ rel = self.__class__(self.id) rel.__primary_parent__ = container rel = rel.__of__(container) norelcopy = getattr(self, 'zNoRelationshipCopy', []) if self.id in norelcopy: return rel if self.remoteTypeName() == "ToMany": for robj in self.objectValuesAll(): rel.addRelation(robj) return rel def exportXml(self,ofile,ignorerels=[]): """Return an xml representation of a ToManyRelationship <tomany id='interfaces'> <link>/Systems/OOL/Mail</link> </tomany> """ if self.countObjects() == 0: return ofile.write("<tomany id='%s'>\n" % self.id) for id in self.objectIdsAll(): ofile.write("<link objid='%s'/>\n" % id) ofile.write("</tomany>\n") def all_meta_types(self, interfaces=None): """Return empty list not allowed to add objects to a ToManyRelation""" return [] def convertToPersistentList(self): self._objects = PersistentList(self._objects) def checkObjectRelation(self, obj, remoteName, parentObject, repair): deleted = False try: ppath = obj.getPrimaryPath() getObjByPath(self, ppath) except (KeyError, NotFound): log.error("object %s in relation %s has been deleted " \ "from its primary path", obj.getPrimaryId(), self.getPrimaryId()) if repair: log.warn("removing object %s from relation %s", obj.getPrimaryId(), self.getPrimaryId()) self._objects.remove(obj) self.__primary_parent__._p_changed = True deleted = True if not deleted: rrel = getattr(obj, remoteName) if not rrel.hasobject(parentObject): log.error("remote relation %s doesn't point back to %s", rrel.getPrimaryId(), self.getPrimaryId()) if repair: log.warn("reconnecting relation %s to relation %s", rrel.getPrimaryId(),self.getPrimaryId()) rrel._add(parentObject) return deleted def checkRelation(self, repair=False): """Check to make sure that relationship bidirectionality is ok. """ if len(self._objects): log.debug("checking relation: %s", self.id) # look for objects that don't point back to us # or who should no longer exist in the database rname = self.remoteName() parobj = self.getPrimaryParent() for obj in self._objects: self.checkObjectRelation(obj, rname, parobj, repair) # find duplicate objects keycount = {} for obj in self._objects: key = obj.getPrimaryId() c = keycount.setdefault(key, 0) c += 1 keycount[key] = c # Remove duplicate objects or objects that don't exist for key, val in keycount.items(): if val > 1: log.critical("obj:%s rel:%s dup found obj:%s count:%s", self.getPrimaryId(), self.id, key, val) if repair: log.critical("repair key %s", key) self._objects = [ o for o in self._objects \ if o.getPrimaryId() != key ] try: obj = self.getObjByPath(key) self._objects.append(obj) except KeyError: log.critical("obj %s not found in database", key)
class TZContainer(TZObj): 'Base class for all item-containing objects (including characters).' def __init__(self, name='', short='', long='', owner=None, items=None): TZObj.__init__(self, name, short, long, owner) self._item_ids = PersistentList() if items is not None: for item in items: self.add(item) def destroy(self): 'Get rid of this container and every item in it.' for item in self.items(): item.destroy() TZObj.destroy(self) def __contains__(self, obj): 'Return True if the given object is in this container.' return obj.tzid in self._item_ids def has_inside(self, obj): '''return true if obj is somewhere inside of this container. Unlike __contains__, has_inside() will search recursively. ''' found = False for item in self.items(): if item is obj: found = True break elif item.has_inside(obj): found = True break return found def act_near(self, info): '''Something has happened near this object. Handle it if necessary, and pass the action on to any contained items. ''' TZObj.act_near(self, info) for item in self.items(): item.act_near(info) def items(self): 'Return a list of the items in this container.' return [tzindex.get(iid) for iid in self._item_ids] def item(self, iid): 'Return the item with the give id number if it is in this container.' if iid in self._item_ids: return tzindex.get(iid) else: return None def itemnames(self): 'Return a list of the names of the items in this container.' return [item.name for item in self.items()] def itemname(self, name, all=False): '''Return the item with the given name if it is in this container, or None if no such item is in this container. If no item has exactly the name given, items with a name_aka list will be checked to see if the name is another name for the item. Since item names are not unique, itemname returns the first item found with the given name. Pass in the parameter all=True to get a list of all items in this container with the given name instead. Also searches for item names after removing any included article. For instance, if the player says "get the hat" itemname() will first look for "the hat" and then for "hat". ''' result = set() for item in self.items(): if item.name == name: if not all: return item else: if item not in result: result.add(item) for item in self.items(): if hasattr(item, 'name_aka'): for aka in item.name_aka: if aka == name: if not all: return item else: if item not in result: result.add(item) for article in ('a ', 'an ', 'the '): if name.startswith(article): l = len(article) aname = name[l:] with_article = self.itemname(aname, all) if with_article is not None: if not all: return with_article else: result.add(with_article) if result: return list(result) else: return None def add(self, item): 'Put the given item in this container.' if item not in self: self._item_ids.append(item.tzid) item.container = self def remove(self, item): '''Remove the given item from this container, if it is there. Does not raise any error if the item is not in this container. ''' if item in self: self._item_ids.remove(item.tzid) item.container = None def has_inside(self, item): '''Check for item in this container, including inside of containers in this container. ''' for i in self.items(): if i is item: return True if hasattr(i, 'has_inside'): return i.has_inside(item)
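# itemname retries a failed lookup after stripping a leading article, so
# "the hat" also matches an item named "hat".  A standalone sketch of that
# article-stripping step (the item names are illustrative):
def strip_article(name):
    for article in ('a ', 'an ', 'the '):
        if name.startswith(article):
            return name[len(article):]
    return name

names = ['hat', 'old boot']

def find(name):
    if name in names:
        return name
    stripped = strip_article(name)
    return stripped if stripped in names else None

assert find('the hat') == 'hat'
assert find('an old boot') == 'old boot'
assert find('a torch') is None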
class NotificationTool(Folder): """ """ meta_type = core_constants.METATYPE_NOTIFICATIONTOOL icon = 'misc_/NaayaCore/NotificationTool.gif' meta_types = () all_meta_types = meta_types security = ClassSecurityInfo() # default configuration settings default_config = { 'admin_on_error': True, 'admin_on_edit': True, 'enable_instant': True, 'enable_daily': True, 'enable_anonymous': False, # Enable anonymous notifications 'daily_hour': 0, 'enable_weekly': True, 'weekly_day': 1, # 1 = monday, 7 = sunday 'weekly_hour': 0, 'enable_monthly': True, 'monthly_day': 1, # 1 = first day of the month 'monthly_hour': 0, 'notif_content_types': [], } def __init__(self, id, title): """ """ self.id = id self.title = title self.config = PersistentDict(self.default_config) self.timestamps = PersistentDict() # Confirmations list self.pending_anonymous_subscriptions = PersistentList() def get_config(self, key): return self.config.get(key) def get_location_link(self, location): if location: return self.restrictedTraverse(location, self.getSite()).absolute_url() else: return self.getSite().absolute_url() def _validate_subscription(self, **kw): """ Validate add/edit subscription for authorized and anonymous users """ if (kw['notif_type'] not in self.available_notif_types(kw['location']) and not (kw['notif_type'] == 'administrative' and self.checkPermissionPublishObjects())): raise i18n_exception(ValueError, 'Subscribing to ${notif_type} ' 'notifications in "${location}" not allowed', location=kw['location'] or self.getSite().title, notif_type=kw['notif_type']) try: obj = self.getSite().restrictedTraverse(kw['location']) except: raise i18n_exception(ValueError, 'This path is invalid or protected') try: subscription_container = ISubscriptionContainer(obj) except: raise i18n_exception(ValueError, 'Cannot subscribe to this folder') if kw.get('anonymous', False): # Check if subscription exists for this anonymous subscriber if not is_valid_email(kw.get('email', '')): raise i18n_exception(ValueError, 'Your e-mail address does not appear ' 'to be valid.') for id, subscription in subscription_container.list_with_keys(): # Normal subscriptions don't have e-mail if isinstance(subscription, AnonymousSubscription): if (subscription.email == kw['email'] and subscription.notif_type == kw['notif_type'] and subscription.lang == kw['lang']): raise i18n_exception(ValueError, 'Subscription already exists') def _sitemap_dict(self, form): """ Compose a sitemap dict """ node = form.get('node', '') if not node or node == '/': node = '' def traverse(objects, level=0, stop_level=2, exclude_root=False): """ Create a dict with node properties and children. This is a fixed level recursion. On some sites there are a lot of objects so we don't need to get the whole tree. 
""" res = [] for ob in objects: if ISubscriptionTarget.providedBy(ob) is False: continue children_objects = [] if level != stop_level: # Stop if the level is reached # Create a list of object's children if hasattr(ob, 'objectValues'): # Get only naaya container objects for child in ob.objectValues( self.get_naaya_containers_metatypes()): # Skip unsubmited/unapproved if not getattr(child, 'approved', False): continue elif not getattr(child, 'submitted', False): continue else: children_objects.append(child) if hasattr(ob, 'approved'): icon = ob.approved and ob.icon or ob.icon_marked else: icon = ob.icon children = traverse(children_objects, level+1, stop_level) if exclude_root: # Return only the children if this is set return children res.append({ 'data': { 'title': self.utStrEscapeHTMLTags( self.utToUtf8(ob.title_or_id())), 'icon': icon }, 'attributes': { 'title': path_in_site(ob) }, 'children': children }) return res if node == '': tree_dict = traverse([self.getSite()]) else: tree_dict = traverse([self.restrictedTraverse(node)], exclude_root=True) return tree_dict security.declarePublic('sitemap') def sitemap(self, REQUEST=None, **kw): """ Return a json (for Ajax tree) representation of published objects marked with `ISubscriptionTarget` including the portal organized in a tree (sitemap) """ form = {} if REQUEST is not None: form = REQUEST.form REQUEST.RESPONSE.setHeader('content-type', 'application/json') else: form.update(kw) return json.dumps(self._sitemap_dict(form)) security.declarePrivate('add_account_subscription') def add_account_subscription(self, user_id, location, notif_type, lang, content_types=[]): """ Subscribe the user `user_id` """ self._validate_subscription(user_id=user_id, location=location, notif_type=notif_type, lang=lang, content_types=content_types) try: self.remove_account_subscription(user_id, location, notif_type, lang) except ValueError: pass obj = self.getSite().restrictedTraverse(location) subscription_container = ISubscriptionContainer(obj) subscription = AccountSubscription(user_id, notif_type, lang, content_types) subscription_container.add(subscription) security.declarePrivate('add_anonymous_subscription') def add_anonymous_subscription(self, **kw): """ Handle anonymous users """ self._validate_subscription(anonymous=True, **kw) subscription = AnonymousSubscription(**kw) # Add to temporary container self.pending_anonymous_subscriptions.append(subscription) # Send email email_tool = self.getSite().getEmailTool() email_from = email_tool.get_addr_from() email_template = EmailPageTemplateFile( 'emailpt/confirm.zpt', globals()) email_data = email_template.render_email( **{'key': subscription.key, 'here': self}) email_to = subscription.email email_tool.sendEmail(email_data['body_text'], email_to, email_from, email_data['subject']) security.declarePrivate('remove_account_subscription') def remove_account_subscription(self, user_id, location, notif_type, lang, content_types=None): obj = self.getSite().restrictedTraverse(location) subscription_container = ISubscriptionContainer(obj) n = utils.match_account_subscription(subscription_container, user_id, notif_type, lang, content_types) if n is None: raise ValueError('Subscription not found') subscription_container.remove(n) security.declarePrivate('unsubscribe_links_html') unsubscribe_links_html = PageTemplateFile("emailpt/unsubscribe_links.zpt", globals()) security.declarePrivate('remove_anonymous_subscription') def remove_anonymous_subscription(self, email, location, notif_type, lang): try: obj = 
self.getSite().restrictedTraverse(location) except: raise i18n_exception(ValueError, 'Invalid location') try: subscription_container = ISubscriptionContainer(obj) except: raise i18n_exception(ValueError, 'Invalid container') anonymous_subscriptions = [(n, s) for n, s in subscription_container.list_with_keys() if hasattr(s, 'email')] subscriptions = filter(lambda s: (s[1].email == email and s[1].location == location and s[1].notif_type == notif_type), anonymous_subscriptions) if len(subscriptions) == 1: subscription_container.remove(subscriptions[0][0]) else: raise i18n_exception(ValueError, 'Subscription not found') security.declareProtected(view, 'available_notif_types') def available_notif_types(self, location=''): if self.config['enable_instant']: yield 'instant' if self.config['enable_daily']: yield 'daily' if self.config['enable_weekly']: yield 'weekly' if self.config['enable_monthly']: yield 'monthly' security.declarePrivate('notify_maintainer') def notify_maintainer(self, ob, folder, **kwargs): """ Process and notify by email that B{p_object} has been uploaded into the B{p_folder}. """ auth_tool = self.getSite().getAuthenticationTool() emails = self.getMaintainersEmails(ob) person = self.REQUEST.AUTHENTICATED_USER.getUserName() if len(emails) > 0: maintainers_data = {} for email in emails: maintainers_data[email] = { 'ob': ob, 'here': self, 'person': auth_tool.name_from_userid(person), 'ob_edited': kwargs.get('ob_edited'), 'approved': ob.approved, 'container_basket': '%s/basketofapprovals_html' % folder.absolute_url(), } notif_logger.info('Maintainer notifications on %r', ofs_path(ob)) template = self._get_template('maintainer') self._send_notifications(maintainers_data, template) security.declarePrivate('notify_comment_maintainer') def notify_comment_maintainer(self, comment, parent, **kwargs): """ Process and notify by email that a comment B{comemnt} has been added to the object B{parent}. 
""" auth_tool = self.getSite().getAuthenticationTool() emails = self.getMaintainersEmails(parent) if len(emails) > 0: maintainers_data = {} for email in emails: maintainers_data[email] = { 'parent': parent, 'here': self, 'comment': comment, 'person': auth_tool.name_from_userid(comment.author), 'container_basket': '%s/basketofapprovals_html' % parent.absolute_url(), } notif_logger.info('Maintainer comment notifications on %r', ofs_path(parent)) template = self._get_template('maintainer') self._send_notifications(maintainers_data, template) security.declarePrivate('notify_administrative') def notify_administrative(self, ob, user_id, ob_edited=False): """ send administrative notifications because object `ob` was added or edited by the user `user_id` """ auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, ob, notif_type='administrative', **{'person': auth_tool.name_from_userid(user_id), 'ob_edited': ob_edited, 'approved': ob.approved, 'container_basket': '%s/basketofapprovals_html' % ob.aq_parent.absolute_url(), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Administrative notifications on %r', ofs_path(ob)) template = self._get_template('administrative') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_comment_administrative') def notify_comment_administrative(self, comment, parent, user_id): """ send administrative notifications because a comment was added to object `ob` by the user `user_id` """ auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data( self, parent, notif_type='administrative', **{'comment': comment, 'parent': parent, 'here': self, 'person': auth_tool.name_from_userid(user_id), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Administrative comment notifications on %r', ofs_path(parent)) template = self._get_template('administrative') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_instant') def notify_instant(self, ob, user_id, ob_edited=False): """ send instant notifications because object `ob` was changed by the user `user_id` """ if not self.config['enable_instant']: return # Don't send notifications if the object is unapproved, but store them # into a queue to send them later when it becomes approved if not ob.approved: return auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data(self, ob, **{ 'person': auth_tool.name_from_userid(user_id), 'ob_edited': ob_edited, }) if len(subscribers_data.keys()) > 0: notif_logger.info('Instant notifications on %r', ofs_path(ob)) template = self._get_template('instant') self._send_notifications(subscribers_data, template) security.declarePrivate('notify_comment_instant') def notify_comment_instant(self, comment, parent, user_id): """ send instant notifications because a comment was added to object `ob` by the user `user_id` """ if not self.config['enable_instant']: return # Don't send notifications if the object is unapproved, but store them # into a queue to send them later when it becomes approved if not parent.approved: return auth_tool = self.getSite().getAuthenticationTool() subscribers_data = utils.get_subscribers_data(self, parent, **{ 'comment': comment, 'parent': parent, 'person': auth_tool.name_from_userid(user_id), }) if len(subscribers_data.keys()) > 0: notif_logger.info('Comment instant notifications on %r', ofs_path(parent)) template = self._get_template('instant') 
self._send_notifications(subscribers_data, template) security.declarePrivate('notify_account_modification') def notify_account_modification(self, email, obj, username=None, new_roles=[], removed_roles=[]): """ Send notification that the user received or lost one or more roles in the specified location """ email_data = { email: {'new_roles': new_roles, 'removed_roles': removed_roles, 'username': username, 'obj': obj, } } notif_logger.info('Account modification notification on %s' % self.getSite().getId()) template = self._get_template('account_modified') self._send_notifications(email_data, template) def _get_template(self, name): template = self._getOb('emailpt_%s' % name, None) if template is not None: return template.render_email template = self._getOb(name, None) if template is not None: return template.render_email template = email_templates.get(name, None) if template is not None: return template.render_email raise ValueError('template for %r not found' % name) def _send_notifications(self, messages_by_email, template): """ Send the notifications described in the `messages_by_email` data structure, using the specified EmailTemplate. `messages_by_email` should be a dictionary, keyed by email address. The values should be dictionaries suitable to be passed as kwargs (options) to the template. """ portal = self.getSite() email_tool = portal.getEmailTool() addr_from = email_tool.get_addr_from() for addr_to, kwargs in messages_by_email.iteritems(): translate = self.portal_i18n.get_translation kwargs.update({'portal': portal, '_translate': translate}) mail_data = template(**kwargs) notif_logger.info('.. sending notification to %r', addr_to) utils.send_notification(email_tool, addr_from, addr_to, mail_data['subject'], mail_data['body_text']) def _send_newsletter(self, notif_type, when_start, when_end): """ We'll look in the ``Products.Naaya.NySite.getActionLogger`` for object creation/modification log entries. Then we'll send notifications for the period between `when_start` and `when_end` using the `notif_type` template. """ notif_logger.info('Notifications newsletter on site %r, type %r, ' 'from %s to %s', ofs_path(self.getSite()), notif_type, when_start, when_end) objects_by_email = {} langs_by_email = {} subscriptions_by_email = {} anonymous_users = {} for log_type, ob in utils.get_modified_objects(self.getSite(), when_start, when_end): notif_logger.info('.. modified object: %r', ofs_path(ob)) for subscription in utils.fetch_subscriptions(ob, inherit=True): if subscription.notif_type != notif_type: continue if not subscription.check_permission(ob): continue email = subscription.get_email(ob) if email is None: continue content_types = getattr(subscription, 'content_types', []) if content_types and ob.meta_type not in content_types: continue notif_logger.info('.. .. 
sending newsletter to %r', email) objects_by_email.setdefault(email, []).append({ 'ob': ob, 'type': log_type, }) langs_by_email[email] = subscription.lang subscriptions_by_email[email] = subscription anonymous_users[email] = isinstance(subscription, AnonymousSubscription) messages_by_email = {} for email in objects_by_email: messages_by_email[email] = { 'objs': objects_by_email[email], '_lang': langs_by_email[email], 'subscription': subscriptions_by_email[email], 'here': self, 'anonymous': anonymous_users[email] } template = self._get_template(notif_type) self._send_notifications(messages_by_email, template) def _cron_heartbeat(self, when): transaction.commit() # commit earlier stuff; fresh transaction transaction.get().note('notifications cron at %s' % ofs_path(self)) # Clean temporary subscriptions after a week: if self.config.get('enable_anonymous', False): a_week_ago = when - timedelta(weeks=1) for tmp_subscription in self.pending_anonymous_subscriptions[:]: if tmp_subscription.datetime <= a_week_ago: self.pending_anonymous_subscriptions.remove( tmp_subscription) # daily newsletter ### if self.config['enable_daily']: # calculate the most recent daily newsletter time daily_time = time(hour=self.config['daily_hour']) latest_daily = datetime.combine(when.date(), daily_time) if latest_daily > when: latest_daily -= timedelta(days=1) # check if we should send a daily newsletter prev_daily = self.timestamps.get('daily', when - timedelta(days=1)) if prev_daily < latest_daily < when: self._send_newsletter('daily', prev_daily, when) self.timestamps['daily'] = when # weekly newsletter ### if self.config['enable_weekly']: # calculate the most recent weekly newsletter time weekly_time = time(hour=self.config['daily_hour']) t = datetime.combine(when.date(), weekly_time) days_delta = self.config['weekly_day'] - t.isoweekday() latest_weekly = t + timedelta(days=days_delta) if latest_weekly > when: latest_weekly -= timedelta(weeks=1) # check if we should send a weekly newsletter prev_weekly = self.timestamps.get('weekly', when - timedelta(weeks=1)) if prev_weekly < latest_weekly < when: self._send_newsletter('weekly', prev_weekly, when) self.timestamps['weekly'] = when # monthly newsletter ### if self.config['enable_monthly']: # calculate the most recent monthly newsletter time monthly_time = time(hour=self.config['monthly_hour']) the_day = utils.set_day_of_month(when.date(), self.config['monthly_day']) latest_monthly = datetime.combine(the_day, monthly_time) if latest_monthly > when: latest_monthly = utils.minus_one_month(latest_monthly) # check if we should send a monthly newsletter prev_monthly = self.timestamps.get('monthly', utils.minus_one_month(when)) if prev_monthly < latest_monthly < when: self._send_newsletter('monthly', prev_monthly, when) self.timestamps['monthly'] = when transaction.commit() # make sure our timestamp updates are saved def index_html(self, RESPONSE): """ redirect to admin page """ RESPONSE.redirect(self.absolute_url() + '/my_subscriptions_html') security.declareProtected(view, 'my_subscriptions_html') my_subscriptions_html = NaayaPageTemplateFile( 'zpt/index', globals(), 'naaya.core.notifications.my_subscriptions') security.declarePrivate('list_user_subscriptions') def user_subscriptions(self, user, cutoff_level=None): """ Returns all user subscriptions in the portal. Use with caution as this iterates almost all the objects in site. You can use `cutoff_level` to limit the depth. 
""" out = [] user_id = user.getId() for obj, n, subscription in utils.walk_subscriptions(self.getSite(), cutoff_level): if not isinstance(subscription, AccountSubscription): continue if subscription.user_id != user_id: continue out.append({ 'object': obj, 'notif_type': subscription.notif_type, 'content_types': getattr(subscription, 'content_types', []), 'lang': subscription.lang }) return out security.declareProtected(view, 'user_not_found') def user_not_found(self, REQUEST): """ Returns True if the user is not Anonymous, but is still not found by the AuthenticationTool (i.e. is maybe defined in the Zope root) """ user = REQUEST.AUTHENTICATED_USER if not isinstance(user, basestring): # with LDAP authentication, user is LDAP user instance user = user.id acl_tool = self.getAuthenticationTool() if acl_tool.get_user_with_userid(user) is None: return True security.declareProtected(view, 'list_my_subscriptions') def list_my_subscriptions(self, REQUEST): """ Returns a list of mappings (location, notif_type, lang) for all subscriptions of logged-in user """ user = REQUEST.AUTHENTICATED_USER if user.getId() is None and not self.config.get('enable_anonymous', False): raise Unauthorized # to force login subscriptions = self.user_subscriptions(user) for subscription in subscriptions: subscription['location'] = path_in_site(subscription['object']) del subscription['object'] return subscriptions security.declareProtected(view, 'my_first_subscription') def get_location_subscription(self, location, notif_type=None): """ Returns the first of the authenticated user's subscriptions in location """ for subscription in self.list_my_subscriptions(self.REQUEST): if subscription['location'] == location: if notif_type: if subscription['notif_type'] == notif_type: return subscription else: return subscription security.declareProtected(view, 'subscribe_me') def subscribe_me(self, REQUEST, location, notif_type, lang=None, content_types=[]): """ add subscription for currently-logged-in user """ # Even if some content types were selected (by turning off javascript) # they should be ignored, no filtering in administrative notifications if notif_type == 'administrative': content_types = [] if isinstance(content_types, basestring): content_types = [content_types] if lang is None: lang = self.gl_get_selected_language() REQUEST.form['lang'] = lang user_id = REQUEST.AUTHENTICATED_USER.getId() if location == '/': location = '' if user_id is None and not self.config.get('enable_anonymous', False): raise Unauthorized # to force login try: if user_id: self.add_account_subscription(user_id, location, notif_type, lang, content_types) if content_types: self.setSessionInfoTrans( 'You will receive ${notif_type} notifications' ' for any changes in "${location}" for objects of ' 'types ${content_types}.', notif_type=notif_type, location=location or self.getSite().title, content_types=', '.join(content_types)) else: self.setSessionInfoTrans( 'You will receive ${notif_type} notifications' ' for any changes in "${location}".', notif_type=notif_type, location=location) else: self.add_anonymous_subscription(**dict(REQUEST.form)) self.setSessionInfoTrans( 'An activation e-mail has been sent to ${email}. 
' 'Follow the instructions to subscribe to ${notif_type} ' 'notifications for any changes in "${location}".', notif_type=notif_type, location=location, content_types=content_types, email=REQUEST.form.get('email')) except ValueError, msg: self.setSessionErrors([unicode(msg)]) return REQUEST.RESPONSE.redirect(self.absolute_url() + '/my_subscriptions_html')
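# A minimal standalone sketch (not part of the original source) of the window
# check that _cron_heartbeat applies above: a daily newsletter is sent only if
# the most recent scheduled send time falls strictly between the previously
# recorded send and the current cron run.  `daily_hour` and `prev_daily` stand
# in for self.config['daily_hour'] and self.timestamps.get('daily', ...).
from datetime import datetime, time, timedelta

def should_send_daily(when, prev_daily, daily_hour):
    # most recent scheduled daily send at or before `when`
    latest_daily = datetime.combine(when.date(), time(hour=daily_hour))
    if latest_daily > when:
        latest_daily -= timedelta(days=1)
    # send only if the schedule point falls inside the (prev_daily, when) window
    return prev_daily < latest_daily < when

# Example: cron runs at 13:05, newsletter scheduled for 13:00, previous run at 12:05.
# should_send_daily(datetime(2024, 1, 2, 13, 5), datetime(2024, 1, 2, 12, 5), 13) -> True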
class Container(DexterityContent): """Base class for folderish items """ __providedBy__ = FTIAwareSpecification() def __init__(self, id=None, **kwargs): self._data = PersistentDict() self._order = PersistentList() DexterityContent.__init__(self, id, **kwargs) def __getattr__(self, name, default=None): return DexterityContent.__getattr__(self, name) def keys(self): return self._order[:] def __iter__(self): return iter(self.keys()) def __getitem__(self, key): return self._data[key] async def asyncget(self, key): return await synccontext(self)(self._data.__getitem__, key) def get(self, key, default=None): return self._data.get(key, default) def values(self): return [self._data[i] for i in self._order] def __len__(self): return len(self._data) def items(self): return [(i, self._data[i]) for i in self._order] def __contains__(self, key): return key in self._data has_key = __contains__ def __setitem__(self, key, object): existed = key in self._data bad = False if not isinstance(key, six.string_types): bad = True if bad: raise TypeError("'%s' is invalid, the key must be an " "ascii or unicode string" % key) if len(key) == 0: raise ValueError("The key cannot be an empty string") # We have to first update the order, so that the item is available, # otherwise most API functions will lie about their available values # when an event subscriber tries to do something with the container. if not existed: self._order.append(key) # This function creates a lot of events that other code listens to. try: setitem(self, self._data.__setitem__, key, object) except Exception: if not existed: self._order.remove(key) raise return key def __delitem__(self, key): uncontained(self._data[key], self, key) del self._data[key] self._order.remove(key) def updateOrder(self, order): """ See `IOrderedContainer`. >>> oc = OrderedContainer() >>> oc['foo'] = 'bar' >>> oc['baz'] = 'quux' >>> oc['zork'] = 'grue' >>> oc.keys() ['foo', 'baz', 'zork'] >>> oc.updateOrder(['baz', 'foo', 'zork']) >>> oc.keys() ['baz', 'foo', 'zork'] >>> oc.updateOrder(['baz', 'zork', 'foo']) >>> oc.keys() ['baz', 'zork', 'foo'] >>> oc.updateOrder(['baz', 'zork', 'foo']) >>> oc.keys() ['baz', 'zork', 'foo'] >>> oc.updateOrder(('zork', 'foo', 'baz')) >>> oc.keys() ['zork', 'foo', 'baz'] >>> oc.updateOrder(['baz', 'zork']) Traceback (most recent call last): ... ValueError: Incompatible key set. >>> oc.updateOrder(['foo', 'bar', 'baz', 'quux']) Traceback (most recent call last): ... ValueError: Incompatible key set. >>> oc.updateOrder(1) Traceback (most recent call last): ... TypeError: order must be a tuple or a list. >>> oc.updateOrder('bar') Traceback (most recent call last): ... TypeError: order must be a tuple or a list. >>> oc.updateOrder(['baz', 'zork', 'quux']) Traceback (most recent call last): ... ValueError: Incompatible key set. >>> del oc['baz'] >>> del oc['zork'] >>> del oc['foo'] >>> len(oc) 0 """ if not isinstance(order, list) and \ not isinstance(order, tuple): raise TypeError('order must be a tuple or a list.') if len(order) != len(self._order): raise ValueError("Incompatible key set.") was_dict = {} will_be_dict = {} new_order = PersistentList() for i in range(len(order)): was_dict[self._order[i]] = 1 will_be_dict[order[i]] = 1 new_order.append(order[i]) if will_be_dict != was_dict: raise ValueError("Incompatible key set.") self._order = new_order notifyContainerModified(self)
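# A minimal sketch (illustration only, not from the original source) of the
# ordering pattern Container.__setitem__ uses above: register the key in the
# order list *before* storing the value so event subscribers see a consistent
# container, and roll the order back if the store fails.  A plain dict/list and
# a hypothetical `notify` callback stand in for PersistentDict/PersistentList
# and the zope event machinery.
class ToyOrderedContainer(object):
    def __init__(self, notify=None):
        self._data = {}
        self._order = []
        self._notify = notify or (lambda key: None)

    def __setitem__(self, key, value):
        existed = key in self._data
        if not existed:
            self._order.append(key)      # make the key visible first
        try:
            self._data[key] = value
            self._notify(key)            # subscribers may inspect keys() here
        except Exception:
            if not existed:
                self._order.remove(key)  # roll back the ordering on failure
            raise

    def keys(self):
        return self._order[:]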
class ZopeStore(OpenIDStore): """Zope OpenID store. This class implements an OpenID store which uses the ZODB. """ def __init__(self): self.associations=OOBTree() self.handles=OOBTree() self.nonces=OITreeSet() self.noncetimeline=PersistentList() self.assoctimeline=PersistentList() def getAssociationKey(self, server_url, handle): """Generate a key used to identify an association in our storage. """ if handle is None: return self.handles[server_url][0] return (server_url, handle) def storeAssociation(self, server_url, association): key=self.getAssociationKey(server_url, association.handle) self.associations[key]=association.serialize() now=time.time() def getKey(item): return self.getAssociation(item[0], item[1], remove=False).getExpiresIn(now) lst=self.handles.get(server_url, []) lst.append(key) lst.sort(key=getKey) self.handles[server_url]=lst self.assoctimeline.append((association.issued+association.lifetime, key)) def getAssociation(self, server_url, handle=None, remove=True): try: key=self.getAssociationKey(server_url, handle) assoc=Association.deserialize(self.associations[key]) except KeyError: return None if remove and assoc.getExpiresIn()==0: self.removeAssociation(server_url, handle) return None return assoc def removeAssociation(self, server_url, handle): key=self.getAssociationKey(server_url, handle) try: assoc=Association.deserialize(self.associations[key]) del self.associations[key] lst=self.handles[server_url] lst.remove(key) self.handles[server_url]=lst self.assoctimeline.remove((assoc.issued+assoc.lifetime, key)) return True except KeyError: return False def useNonce(self, server_url, timestamp, salt): nonce = (salt, server_url) if nonce in self.nonces: return False self.nonces.insert(nonce) if not hasattr(self, "noncetimeline"): # BBB for store instances from before 1.0b2 self.noncetimeline=PersistentList() self.noncetimeline.append((timestamp, nonce)) return True def cleanupNonces(self): if not hasattr(self, "noncetimeline"): return 0 cutoff=time.time()+SKEW count=0 for (timestamp,nonce) in list(self.noncetimeline): if timestamp<cutoff: self.noncetimeline.remove((timestamp,nonce)) self.nonces.remove(nonce) count+=1 return count def cleanupAssociations(self): if not hasattr(self, "assoctimeline"): return 0 now=time.time() count=0 expired=[key for (timestamp,key) in self.assoctimeline if timestamp<=now] for key in expired: self.removeAssociation(*key) count+=1 return count
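# A hedged usage sketch (not from the original source): useNonce() accepts a
# given (salt, server_url) pair only once, so a replayed nonce is rejected;
# cleanupNonces() later drops stale entries.  The URL and salt are made-up
# example values, and a real deployment runs inside a ZODB connection.
import time as _time

store = ZopeStore()
print(store.useNonce('https://provider.example/openid', _time.time(), 's3kr1t'))   # True
print(store.useNonce('https://provider.example/openid', _time.time(), 's3kr1t'))   # False (replay)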
class Alert(VisualisableElement, Entity): """Alert class""" templates = { InternalAlertKind.content_alert: { 'default': 'lac:views/templates/alerts/content_result.pt', 'small': 'lac:views/templates/alerts/small_content_result.pt' }, InternalAlertKind.moderation_alert: { 'default': 'lac:views/templates/alerts/moderation_result.pt', 'small': 'lac:views/templates/alerts/small_moderation_result.pt' }, InternalAlertKind.service_alert: { 'default': 'lac:views/templates/alerts/service_result.pt', 'small': 'lac:views/templates/alerts/small_service_result.pt' } } icon = 'glyphicon glyphicon-bell' subjects = SharedMultipleProperty('subjects') users_to_alert = SharedMultipleProperty('users_to_alert') def __init__(self, kind, **kwargs): super(Alert, self).__init__(**kwargs) self.set_data(kwargs) self.kind = kind self.users_to_alert = PersistentList() def init_alert(self, users, subjects=[]): self.subscribe(users) for subject in subjects: self.addtoproperty('subjects', subject) def subscribe(self, users): if not isinstance(users, (list, tuple)): users = [users] self.users_to_alert.extend( [str(get_oid(user, user)) for user in users]) def unsubscribe(self, user): key = str(get_oid(user, user)) if key in self.users_to_alert: self.users_to_alert.remove(key) user.addtoproperty('old_alerts', self) self.reindex() def get_subject_state(self, subject, user): return get_states_mapping(user, subject, getattr(subject, 'state_or_none', [None])[0]) def get_templates(self): return self.templates.get(self.kind, {}) def is_kind_of(self, kind): return kind == self.kind def has_args(self, **kwargs): for key in kwargs: if getattr(self, key, None) != kwargs[key]: return False return True
class Container(Object): order_limit = 300 container_order_limit = 300 size = 0 def __init__(self): self._data = OOBTree() self._order = PersistentList() self._container_order = PersistentList() def choose_name(self, name, obj=None): return INameChooser(self).chooseName(name, obj) @property def tag_groups(self): return TagGroups(self) @property def quota(self): return Quota(self) def keys(self): for key in self._order: yield key for key in self._data: if key not in self._order: yield key def __iter__(self): return iter(self.keys()) def __getitem__(self, key): return self._data[key] def get(self, key, default=None): return self._data.get(key, default) def values(self): return [self._data.get(key) for key in self.keys()] def __len__(self): return len(self._data) def items(self): return ((i, self._data.get(i)) for i in self.keys()) def __contains__(self, key): return self._data.has_key(key) has_key = __contains__ def __setitem__(self, key, obj): existed = self._data.has_key(key) bad = False if isinstance(key, StringTypes): try: unicode(key) except UnicodeError: bad = True else: bad = True if bad: raise TypeError("'%s' is invalid, the key must be an " "ascii or unicode string" % key) if len(key) == 0: raise ValueError("The key cannot be an empty string") # We have to first update the order, so that the item is available, # otherwise most API functions will lie about their available values # when an event subscriber tries to do something with the container. if not existed: if len(self._order) < self.order_limit: self._order.append(key) if len(self._container_order) < self.container_order_limit: if 'Container' in getattr(obj, 'object_types', []): self._container_order.append(key) # This function creates a lot of events that other code listens to. try: setitem(self, self._data.__setitem__, key, obj) except Exception: if not existed: if key in self._order: self._order.remove(key) if key in self._container_order: self._container_order.remove(key) raise return key def __delSublocations(self, obj): subs = ISublocations(obj, None) if subs is not None: for sub in subs.sublocations(): sub._v_del = True if ISublocations(sub, None): self.__delSublocations(sub) def __delitem__(self, name): obj = self[name] self.__delSublocations(obj) uncontained(self._data[name], self, name) del self._data[name] if name in self._order: self._order.remove(name) if name in self._container_order: self._container_order.remove(name) def ordered_keys(self): return self._order def ordered_container_keys(self): return self._container_order def ordered_containers(self): return (self._data.get(key) for key in self._container_order) def set_order(self, order): if not isinstance(order, ListType) and \ not isinstance(order, TupleType): raise TypeError('order must be a tuple or a list.') for i in order: if i not in self: raise ValueError('order item not in container.') self._order = PersistentList(order) notifyContainerModified(self) def set_container_order(self, order): if not isinstance(order, ListType) and \ not isinstance(order, TupleType): raise TypeError('order must be a tuple or a list.') for i in order: if i not in self: raise ValueError('order item not in container.') self._container_order = PersistentList(order) notifyContainerModified(self) def object_path(self, obj): parents = [] while obj is not self: if obj.__name__: parents.insert(0, obj.__name__) obj = obj.__parent__ if obj is None: return None return '/'.join(parents) def object_by_path(self, path): context = self for name in filter(lambda i: i != '', path.split('/')): 
context = context.get(name) if context is None: return return context def remove(self, name): del self[name]
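# A minimal sketch (illustration only): keys() in the Container above yields the
# explicitly ordered keys first and then whatever else lives in the BTree, so
# only the first `order_limit` insertions keep a hand-maintained position.  This
# toy uses a plain dict/list; order_limit=3 is an arbitrary stand-in for the
# class attribute (300 in the original).
class ToyLimitedOrder(object):
    order_limit = 3

    def __init__(self):
        self._data = {}
        self._order = []

    def __setitem__(self, key, value):
        if key not in self._data and len(self._order) < self.order_limit:
            self._order.append(key)
        self._data[key] = value

    def keys(self):
        for key in self._order:            # ordered prefix
            yield key
        for key in sorted(self._data):     # remaining keys (a BTree iterates sorted)
            if key not in self._order:
                yield key

# toy = ToyLimitedOrder()
# for name in ('d', 'c', 'b', 'a'): toy[name] = name.upper()
# list(toy.keys()) -> ['d', 'c', 'b', 'a']  (first three keep insertion order,
#                                            'a' falls back to sorted BTree order)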
class Character(TZContainer): 'Base class for Player and Mob classes.' gettable = False stats_list = ['health', 'strength', ] health = int_attr('health') strength = int_attr('strength') settings = ['home'] + stats_list def __init__(self, name='', short='', long=''): self._rid = None TZContainer.__init__(self, name, short, long) self._hid = None self._follow_id = None self._set_default_stats() self._wearing_ids = PersistentList() self.awake = True self.standing = True self.following = None def __repr__(self): return u''' Character (%s): %s ''' % (self.tzid, self.short) def _set_default_stats(self): self._stats0 = PersistentDict() for name in self.stats_list: self._stats0[name] = self.setting(name) def go(self, x): '''Character is trying to go through exit x. Passes through the return value from the exit. ''' r = x.go(self) success, msg = r if success: room = x.room dest = x.destination room.action(dict(act='leave', actor=self, tox=x)) self.move(dest) backx = None for backx in dest.exits(): if backx.destination == room: break dest.action(dict(act='arrive', actor=self, fromx=backx)) return r def look_at(self, obj): '''This character is looking at the given object. returns a list of strings. Takes in to account whether the object is visible to this character. ''' if self.can_see(obj): return obj.look(self) def can_see(self, obj): '''return True if character can see the given object. ''' if obj is None: return False if obj.visible or obj is self or wizard.verify(self): return True else: return False def look(self, looker): '''Return a multiline message (list of strings) for a player looking at this character. ''' msgs = TZContainer.look(self, looker) iis = set(filter(looker.can_see, self.items())) if iis: worn = set(i for i in iis if self.is_wearing(i)) held = iis - worn if held: msgs.append('') msgs.append('Holding:') for item in held: msgs.append(' ' + unicode(item)) if worn: msgs.append('') msgs.append('Wearing:') for item in worn: msgs.append(' ' + unicode(item)) return msgs def _get_room(self): ''''Getter for the room property. Character.room is a read-only property to ensure that Characters are only moved around using the .move() method. There are only a few places that should change a character's room. ''' return rooms.get(self._rid) room = property(_get_room) def _set_home(self, room): 'Setter for the home property.' if room is not None: self._hid = room.tzid else: self._hid = None def _get_home(self): 'Getter for the home property.' return rooms.get(self._hid) or rooms.get(conf.home_id) home = property(_get_home, _set_home) def set_home(self, iden): '''iden can be either a room, or the name or id# of a room, or if iden is 'True', set home to the room the character is currently in, or if iden is 0, None, 'None', False, or 'False', unset the home (set it to None -- which essentially sets it back to the default value set in conf.py). ''' if iden == 'True': self.home = self.room return True if iden in (0, None, 'None', False, 'False'): self.home = None return True if hasattr(iden, 'tzid') and rooms.get(iden.tzid)==iden: room = iden self.home = room return True if hasattr(iden, 'isdigit'): if iden.isdigit(): room = rooms.get(int(iden)) if room is not None: self.home = room return True else: return False else: room = rooms.getname(iden) if room is not None: self.home = room return True return False def _get_following(self): '''Getter for the following property. The following property indicates which mob or player this character is following. 
If possible, when that character leaves the room, this character will follow along. ''' if self._follow_id is None: return None else: player = players.get(self._follow_id) mob = mobs.get(self._follow_id) if player is not None: return player elif mob is not None: return mob else: return None def _set_following(self, c): 'Setter for the following property.' if c is not None: if c == self: self._follow_id = None else: self._follow_id = c.tzid else: self._follow_id = None following = property(_get_following, _set_following) def get_item(self, item, room): 'Get item from room. return True if successful, else False.' if item.get(self): room.remove(item) if self.exists(): self.add(item) room.action(dict(act='get', actor=self, item=item)) return True else: return False def drop_item(self, item): 'Drop item from inventory.' if self.is_wearing(item): self.unwear(item) item.unwear(self) item.drop(self) self.remove(item) room = self.room if item.exists(): room.add(item) room.action(dict(act='drop', actor=self, item=item)) def wear(self, item): "Add the given item to this character's list of worn items." if not item in self or not item.wearable: return False elif item.tzid in self._wearing_ids: return False else: success = item.wear(self) if success: self._wearing_ids.append(item.tzid) self.room.action(dict(act='wear', actor=self, item=item)) return success def wearing(self): 'Return a list of the items this character is wearing.' return [tzindex.get(tzid) for tzid in self._wearing_ids] def is_wearing(self, item): 'Return True if this character is wearing the given item.' if item.tzid in self._wearing_ids: return True else: return False def unwear(self, item): "Remove the given item from this character's list of worn items." if self.is_wearing(item): success = item.unwear(self) if success: self._wearing_ids.remove(item.tzid) self.room.action(dict(act='unwear', actor=self, item=item)) return success else: return False # near actions def near_leave(self, info): 'Someone left the room this character is in.' leaver = info['actor'] x = info['tox'] if leaver is not self and self.can_see(leaver): if self.following==leaver and self.awake: reactor.callLater(0.3, self._follow, leaver, x) def _follow(self, leaver, x): 'Follow along if this character is following someone who left.' try: if leaver not in self.room: self.go(x) except: #print 'Character._follow ABORT' abort() else: #print 'Character._follow COMMIT' commit() def teleport(self, destination=None): destination = destination or self.home if destination is not None: room = self.room if room is not None: room.action(dict(act='teleport_character_away', delay=0.2, actor=None, character=self)) reactor.callLater(0.4, self.move, destination) destination.action(dict(act='teleport_character_in', delay=0.6, actor=None, character=self))
class BusinessAction(Wizard, LockableElement, Persistent): node_definition = NotImplemented context = NotImplemented processs_relation_id = NotImplemented actionType = NotImplemented behavior_id = '' #validation relation_validation = NotImplemented roles_validation = NotImplemented processsecurity_validation = NotImplemented state_validation = NotImplemented #style information access_controled = False def __init__(self, workitem, **kwargs): super(BusinessAction, self).__init__(**kwargs) self.workitem = workitem self.isexecuted = False self.behavior_id = self.behavior_id or self.node_id self.sub_process = None self.local_assigned_to = PersistentList() if self.title == '' or self.title is NotImplemented: self.title = self.node.title if self.description == '' or self.description is NotImplemented: self.description = self.node.description @classmethod def get_instance(cls, context, request, **kw): action_uid = request.params.get('action_uid', None) source_action = None if action_uid: source_action = get_obj(int(action_uid)) if source_action and \ source_action._class_ is cls and \ source_action.validate(context, request): return source_action instances = getBusinessAction(context, request, cls.node_definition.process.id, cls.node_definition.__name__, action_type=cls, validate=kw.get('validate', True)) if instances is None: return None isstart = request.params.get('isstart', False) if isstart: for inst in instances: if inst.isstart: return inst return instances[0] @classmethod def get_allinstances(cls, context, request, **kw): instance = getBusinessAction(context, request, cls.node_definition.process.id, cls.node_definition.__name__) return instance @classmethod def get_validator(cls, **kw): return getBusinessActionValidator(cls) @property def potential_contexts_ids(self): try: contexts = self.process.execution_context.involved_entities( self.processs_relation_id) result = [] for context in contexts: try: result.append(str(get_oid(context))) except Exception: pass return result except Exception: return ['any'] @property def actions(self): allactions = getAllBusinessAction(self) return [ActionCall(a, self) for a in allactions] @property def process(self): return self.workitem.process @property def node(self): return self.workitem.node @property def process_id(self): return self.workitem.process_id @property def definition(self): if self.node_definition is not NotImplemented: return self.node_definition return self.node.definition if isinstance(self.node, BPMNElement)\ else self.node @property def node_id(self): return self.definition.__name__ @property def groups(self): return self.definition.groups @property def view_name(self): return self.action_view.name @property def isautomatic(self): return self.actionType is ActionType.automatic @property def issystem(self): return self.actionType is ActionType.system @property def isstart(self): return isinstance(self.workitem, StartWorkItem) @property def informations(self):# pragma: no cover if self.process is not None: return 'Description: ' + \ self.description + \ '\n Process: '+self.process.title else: return 'Description: ' + \ self.description + \ '\n Process: '+self.node.process.id @property def action_view(self): return DEFAULTMAPPING_ACTIONS_VIEWS.get(self.__class__, None) @property def assigned_to(self): if getattr(self, 'local_assigned_to', []): return self.local_assigned_to return getattr(self.node, 'assigned_to', []) def get_potential_context(self, request=None): if request is None: request = get_current_request() entities = [] try: 
entities = [self.process.execution_context.involved_entity( self.processs_relation_id)] except Exception: try: entities = self.process.execution_context.involved_collection( self.processs_relation_id) except Exception: entities = find_entities((self.context,)) for entity in entities: try: if entity: self.validate(entity, request) return entity except ValidationError: continue return None def url(self, obj): query = {} try: actionuid = get_oid(self) query = {'action_uid': actionuid} except AttributeError: query = {'isstart': 'True'} return get_current_request().resource_url( obj, '@@'+self.view_name, query=query) def assigne_to(self, users): if not isinstance(users, (list, tuple)): users = [users] users = [u for u in users if u not in self.local_assigned_to] self.local_assigned_to.extend(users) def unassigne(self, users): if not isinstance(users, (list, tuple)): users = [users] users = [u for u in users if u in self.local_assigned_to] for user in users: self.local_assigned_to.remove(user) def set_assignment(self, users=None): self.local_assigned_to = PersistentList() if users is not None: self.assigne_to(users) def validate(self, context, request, **kw): is_valid, message = self.validate_mini(context, request, **kw) if not is_valid: raise ValidationError(msg=message) return True def validate_mini(self, context, request, **kw): return validate_action(self, context, request, **kw) def before_execution(self, context, request, **kw): self.lock(request) self.workitem.lock(request) def _consume_decision(self): if isinstance(self.workitem, UserDecision): self.workitem.consume() def start(self, context, request, appstruct, **kw): return {} def execute(self, context, request, appstruct, **kw): self._consume_decision() if self.isstart: return if isinstance(self.node, SubProcess) and not self.sub_process: self.sub_process = self.node._start_subprocess(self) if self.sub_process: if ITEM_INDEX in kw: self.sub_process.execution_context.add_involved_entity( ITEM_INDEX, kw[ITEM_INDEX]) self.process.execution_context.add_sub_execution_context( self.sub_process.execution_context) def finish_execution(self, context, request, **kw): self.after_execution(context, request, **kw) def after_execution(self, context, request, **kw): self.unlock(request) self.workitem.unlock(request) # TODO self.workitem is a real workitem? if self.isexecuted: self.workitem.node.finish_behavior(self.workitem) def redirect(self, context, request, **kw): return kw def cancel_execution(self, context, request, **kw): self.unlock(request) self.workitem.unlock(request) def reindex(self): event = ObjectModified(self) registry = get_current_registry() registry.subscribers((event, self), None)
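# A minimal sketch (illustration only) of the lookup pattern used by
# get_potential_context() above: walk the candidate entities and return the
# first one that validates, skipping candidates that raise.  ValueError stands
# in for the ValidationError used by the real action.
def first_valid(candidates, validate):
    for candidate in candidates:
        try:
            if candidate and validate(candidate):
                return candidate
        except ValueError:
            continue
    return None

# first_valid([None, 'draft', 'published'], lambda state: state == 'published') -> 'published'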
class CoreFilter(Persistent): """Core persistent record filter implementation""" implements(IRecordFilter) def __init__(self, *args, **kwargs): super(CoreFilter, self).__init__(*args, **kwargs) self._uid = str(uuid.uuid4()) self.reset(**kwargs) def reset(self, **kwargs): self.operator = kwargs.get('operator', 'AND') self._queries = PersistentMapping() self._order = PersistentList() def validate(self, schema): for q in self._queries.values(): if not q.validate(schema): raise ValidationError(q.fieldname) def build(self, schema): self.validate(schema) return filter_query(self, schema) def add(self, query=None, **kwargs): if query is None: ## attempt to make query from kwargs given either ## field/comparator/value or fieldname/comparator/value field = kwargs.get('field', None) fieldname = kwargs.get('fieldname', None) if not (field or fieldname): raise ValueError('Field missing for query construction') if fieldname is None and field: fieldname = field.__name__ comparator = kwargs.get('comparator', None) value = kwargs.get('value', None) if not (value and comparator): raise ValueError('Missing value or comparator') query = FieldQuery(fieldname, comparator, value) fieldname = query.fieldname self._queries[fieldname] = query self._order.append(fieldname) def remove(self, query): if IFieldQuery.providedBy(query): query = query.fieldname if query not in self._queries: raise KeyError('Query not found (fieldname: %s)' % query) del(self._queries[query]) self._order.remove(query) ## RO mapping interface def get(self, name, default=None): return self._queries.get(name, default) def __len__(self): return len(self._order) def __getitem__(self, name): v = self.get(name, None) if v is None: raise KeyError(name) # fieldname not found return v def __contains__(self, name): if IFieldQuery.providedBy(name): name = name.field.__name__ return name in self._order def keys(self): return list(self._order) def iterkeys(self): return self._order.__iter__() def itervalues(self): return itertools.imap(lambda k: self.get(k), self.iterkeys()) def iteritems(self): return itertools.imap(lambda k: (k, self.get(k)), self.iterkeys()) def values(self): return list(self.itervalues()) def items(self): return list(self.iteritems())
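# A hedged usage sketch (not from the original source): add() builds a
# FieldQuery from fieldname/comparator/value keywords and records insertion
# order in _order.  The field names and comparator tokens ('Eq', 'Ge') are
# made-up example values; whichever comparators the real FieldQuery accepts
# apply instead.
rf = CoreFilter()
rf.add(fieldname='title', comparator='Eq', value='Board minutes')
rf.add(fieldname='start', comparator='Ge', value='2024-01-01')
print(rf.keys())     # ['title', 'start'] -- insertion order is preserved
rf.remove('title')
print(len(rf))       # 1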
class Ordering(Persistent): """ Store information about the ordering of items within a folder. """ implements(IOrdering) def __init__(self): Persistent.__init__(self) self._items = PersistentList() def sync(self, entries): # Go do some cleanup. Any items that are in the folder but # not in the ordering, put at end. Any items that are in the # ordering but not in the folder, remove. for local_name in list(self._items): if local_name not in entries: # Item is in ordering but not in context, remove from # ordering. self._items.remove(local_name) for entry_name in entries: if entry_name not in self._items: # Item is in folder but not in ordering, append to # end. self._items.append(entry_name) def moveUp(self, name): # Move the item with __name__ == name up a position. If at # the beginning, move to last position. position = self._items.index(name) del self._items[position] if position == 0: # Roll over to the end self._items.append(name) else: self._items.insert(position - 1, name) def moveDown(self, name): # Move the item with __name__ == name down a position. If at # the end, move to the first position. position = self._items.index(name) list_length = len(self._items) del self._items[position] if position == (list_length - 1): # Roll over to the beginning self._items.insert(0, name) else: self._items.insert(position + 1, name) def add(self, name): # When a new item is added to a folder, put it at the end. if name not in self._items: self._items.append(name) def remove(self, name): # When an existing item is removed from folder, remove from # ordering. Sure would be nice to use events to do this for # us. if name in self._items: self._items.remove(name) def items(self): return self._items def previous_name(self, current_name): # Given a position in the list, get the previous name, or None if # at the beginning of the list position = self._items.index(current_name) if position == 0: # We are at the beginning of the list, so return None return None else: return self._items[position - 1] def next_name(self, current_name): # Given a position in the list, get the next name, or None if # at the end of the list position = self._items.index(current_name) if position == (len(self._items)-1): # We are at the end of the list, so return None return None else: return self._items[position + 1]
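# A hedged usage sketch (not from the original source): moveUp()/moveDown()
# wrap around, so moving the first item up sends it to the end and moving the
# last item down sends it to the front.  The names are arbitrary examples.
ordering = Ordering()
for name in ('intro', 'body', 'appendix'):
    ordering.add(name)
ordering.moveUp('intro')      # wraps around: ['body', 'appendix', 'intro']
ordering.moveDown('intro')    # wraps back:   ['intro', 'body', 'appendix']
ordering.moveDown('intro')    # ordinary move: ['body', 'intro', 'appendix']
print(list(ordering.items()))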
class Igrac(Persistent): def __init__(self, nadimak, jeLiRacunalo, igra): self.nadimak = nadimak self.karte = PersistentList() self.igra = igra self.jeLiRacunalo = jeLiRacunalo self.zastavice = PersistentDict() self.zastavice.update({ 'uzmiKarte': 0, 'provjeriZvanja': 0, 'hocuLiZvati': 0, 'baciKartu': 0 }) self.igra.onSudjeluj(self) transaction.commit() def uzmiKarte(self): global vidljiveKarteSprites global karteSpritesList self.karte.extend(self.igra.onDajKarte()) self.karte = self.sortirajKarte(self.karte) for i in range(len(self.karte)): kartaSprite = next((x for x in karteSpritesList if x.karta.slika == self.karte[i].slika), None) kartaSprite.pozicioniraj( (1000 - (100 * len(self.karte))) / 2 + 100 * i, 566) kartaSprite.layer = i kartaSprite.prikazi() vidljiveKarteSprites.add(kartaSprite) def sortirajKarte(self, karte): return sorted(karte, key=lambda karta: (karta.boja, karta.poredak), reverse=False) def provjeriZvanja(self): self.igra.onPrijaviZvanje(self, self.karte) def hocuLiZvati(self, moramLiZvati): if self.jeLiRacunalo == True: jacinaAduta = {'Herc': 0, 'Bundeva': 0, 'Zelena': 0, 'Zir': 0} for karta in self.karte: if karta.boja == 'Herc': jacinaAduta['Herc'] += karta.vrijednostAduta elif karta.boja == 'Bundeva': jacinaAduta['Bundeva'] += karta.vrijednostAduta elif karta.boja == 'Zelena': jacinaAduta['Zelena'] += karta.vrijednostAduta elif karta.boja == 'Zir': jacinaAduta['Zir'] += karta.vrijednostAduta najjacaBoja = max(jacinaAduta, key=jacinaAduta.get) if jacinaAduta[najjacaBoja] > 30 or moramLiZvati: print self.nadimak + ": zovem " + najjacaBoja self.igra.onOdaberiAdut(najjacaBoja) else: print self.nadimak + ": dalje!" self.igra.onOdaberiAdut(False) return False def baciKartu(self, odabranaKarta=None): if (self.jeLiRacunalo == True): for karta in self.karte: if self.igra.onJeLiPoPravilima(self.karte, karta) == True: time.sleep(.01) print self.nadimak + ": ", karta self.karte.remove(karta) self.igra.onBaciKartu(karta) else: continue else: if self.igra.onJeLiPoPravilima(self.karte, odabranaKarta) == True: self.zastavice["baciKartu"] = 0 self.igra.onBaciKartu( self.karte.pop(self.karte.index(odabranaKarta))) return True else: return False
class SchemaManager(Persistent): """ Persistent schema manager, persists a list of dotted interface names, and resolves values at runtime dynamically with zope.dottedname. """ implements(ISchemaManager) def __init__(self): self._names = PersistentList() # dotted names ## mapping interface, with lazy resolution of schema ## interfaces by zope.dottedname import/resolution def get(self, name, default=None): name = str(name) if name not in self._names: return default v = _resolve(name) return v def __getitem__(self, name): v = self.get(name, None) if v is None: raise KeyError(name) return v def __contains__(self, name): if IInterface.providedBy(name): name = identify_interface(name) name = str(name) return name in self._names def keys(self): return list(self._names) def values(self): return list(self.itervalues()) def items(self): return list(self.iteritems()) def iterkeys(self): return self._names def itervalues(self): return itertools.imap(lambda k: self.get(k), self.iterkeys()) def iteritems(self): return itertools.imap(lambda k: (k, self.get(k)), self.iterkeys()) def __len__(self): return len(self._names) __iter__ = iterkeys ## mutable interface bind and forget: def bind(self, schema): if not IInterface.providedBy(schema): raise TypeError("Cannot bind non-interface object %s" % schema) name = identify_interface(schema) if name in self._names: raise KeyError("duplicate schema: Interface %s already managed." % (name,)) self._names.append(name) def forget(self, schema): name = str(schema) if IInterface.providedBy(schema): name = identify_interface(schema) if name not in self._names: return self._names.remove(name) def orphans(self): return tuple(k for k, v in self.iteritems() if v is None)
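# A minimal sketch (not from the original source) of the lazy resolution the
# SchemaManager relies on: it stores dotted names and resolves them on access,
# and its docstring points at zope.dottedname, whose resolve() call is roughly
# what _resolve is assumed to wrap.  The interface name below is an arbitrary
# example.
from zope.dottedname.resolve import resolve

dotted = 'zope.interface.interfaces.IInterface'
iface = resolve(dotted)                     # import and return the named object
print('%s.%s' % (iface.__module__, iface.__name__) == dotted)   # True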
class MembraneTool(BaseTool): """Tool for managing members.""" id = TOOLNAME toolicon = "tool.gif" meta_type = "MembraneTool" archetype_name = "MembraneTool" user_adder = "" case_sensitive_auth = True _catalog_count = None implements(IMembraneTool, IAttributeAnnotatable) manage_options = ( {"label": "Types", "action": "manage_membranetypes"}, {"label": "Status Map", "action": "manage_statusmap"}, ) + BaseTool.manage_options security = ClassSecurityInfo() def __init__(self, *args, **kwargs): ZCatalog.__init__(self, self.getId()) self.membrane_types = PersistentList() def registerMembraneType(self, portal_type): if not USE_COLLECTIVE_INDEXING: attool = getToolByName(self, "archetype_tool") catalogs = [x.getId() for x in attool.getCatalogsByType(portal_type)] if TOOLNAME not in catalogs: catalogs.append(TOOLNAME) attool.setCatalogsByType(portal_type, catalogs) elif portal_type not in self.membrane_types: self.membrane_types.append(portal_type) # Trigger the status maps even if the type is already registered. notify(MembraneTypeRegisteredEvent(self, portal_type)) security.declareProtected(ManagePortal, "registerMembraneType") def unregisterMembraneType(self, portal_type): if not USE_COLLECTIVE_INDEXING: attool = getToolByName(self, "archetype_tool") catalogs = [x.getId() for x in attool.getCatalogsByType(portal_type)] if TOOLNAME in catalogs: catalogs.remove(TOOLNAME) attool.setCatalogsByType(portal_type, catalogs) elif portal_type in self.membrane_types: self.membrane_types.remove(portal_type) notify(MembraneTypeUnregisteredEvent(self, portal_type)) security.declareProtected(ManagePortal, "unregisterMembraneType") def listMembraneTypes(self): if not USE_COLLECTIVE_INDEXING: mtypes = [] attool = getToolByName(self, "archetype_tool") catalog_map = getattr(aq_base(attool), "catalog_map", {}) for t, c in catalog_map.items(): if self.getId() in c: mtypes.append(t) return mtypes else: return self.membrane_types security.declareProtected(permissions.VIEW_PUBLIC_PERMISSION, "listMembraneTypes") def getUserObject(self, login=None, user_id=None, brain=False): """ Return the authentication implementation (content item) for a given login or userid. """ query = {} if user_id: if self.case_sensitive_auth and ("exact_getUserId" in self._catalog.indexes): query["exact_getUserId"] = user_id else: query["getUserId"] = user_id elif login: if self.case_sensitive_auth and ("exact_getUserName" in self._catalog.indexes): query["exact_getUserName"] = login else: query["getUserName"] = login if not query: # No user_id or login name given return None query["object_implements"] = user_ifaces.IMembraneUserAuth.__identifier__ uSR = self.unrestrictedSearchResults members = uSR(**query) # filter out inadvertent ZCTextIndex matches by only keeping # records with the same number of characters if "getUserName" in query: members = [mem for mem in members if len(mem.getUserName) == len(login)] if "getUserId" in query: members = [mem for mem in members if len(mem.getUserId) == len(user_id)] if not members: return None if len(members) == 2: # Usually this is an error case, but when importing or # pasting a copy of a Plone site, the catalog can have # duplicate entries. If there are exactly 2 entries, and # one has a path that is not inside this Plone site, then # we assume this is what's happened and we clear out the # bogus entry. 
site = getToolByName(self, "portal_url").getPortalObject() site_path = "/".join(site.getPhysicalPath()) bogus = [b.getPath() for b in members if site_path not in b.getPath()] if len(bogus) == 1: # yup, clear it out and move on self._catalog.uncatalogObject(bogus[0]) members = uSR(**query) assert len(members) == 1, 'more than one member found for "%s"' % (login or user_id) if brain: return members[0] member = members[0]._unrestrictedGetObject() return member security.declarePrivate("getUserObject") def getOriginalUserIdCase(self, userid): """ Used to get the original case spelling of a given user id. """ if userid == "": return None uSR = self.unrestrictedSearchResults query = {"getUserId": userid, "object_implements": user_ifaces.IMembraneUserAuth.__identifier__} members = uSR(**query) # filter out inadvertent ZCTextIndex matches by only keeping # records with the same number of characters members = [mem for mem in members if len(mem.getUserId) == len(userid)] if not members: return None assert len(members) == 1 return members[0].getUserId
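# A minimal sketch (illustration only) of the length filter getUserObject()
# applies above: a ZCTextIndex may return partial matches, so only records whose
# stored login is exactly as long as the requested one are kept.  The records
# below are made-up stand-ins for catalog brains.
records = [{'getUserName': 'anna'}, {'getUserName': 'anna.smith'}]
login = 'anna'
members = [r for r in records if len(r['getUserName']) == len(login)]
print(members)   # [{'getUserName': 'anna'}]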