class Portlet(Persistent):
    """A persistent portlet: a small view fragment rendered by an adapter
    that is looked up by the portlet's ``portlet_type`` name.
    """

    __name__ = None
    __parent__ = None
    type_name = u"Portlet"
    type_title = _(u"Portlet")
    type_description = _(u"A mini view rendered ")
    portlet_type = u""
    add_permission = "Add %s" % type_name

    def __init__(self, portlet_type, **kw):
        self.uid = unicode(uuid4())
        self.portlet_type = portlet_type
        self.__settings__ = OOBTree()
        # Route 'settings' through the property so the BTree is populated.
        self.settings = kw.pop('settings', {})
        self.__dict__.update(**kw)
        super(Portlet, self).__init__()

    @property
    def title(self):
        # Prefer a locally stored title, fall back to the adapter's.
        fallback = getattr(self.portlet_adapter, 'title', u'')
        return self.settings.get('title', fallback)

    @property
    def description(self):
        fallback = getattr(self.portlet_adapter, 'description', u'')
        return self.settings.get('description', fallback)

    @property
    def settings(self):
        return self.__settings__

    @settings.setter
    def settings(self, value):
        # Replace the stored mapping in place rather than rebinding it,
        # so the persistent BTree object is reused.
        self.__settings__.clear()
        self.__settings__.update(value)

    @property
    def schema_factory(self):
        return self.portlet_adapter.schema_factory

    @property
    def portlet_adapter(self):
        registry = get_current_registry()
        return registry.getAdapter(self, IPortletType, name=self.portlet_type)

    @property
    def slot(self):
        # None when detached or when the parent exposes no slot.
        parent = self.__parent__
        if parent is not None:
            try:
                return parent.slot
            except AttributeError:
                pass
        return None

    def render(self, context, request, view, **kw):
        """Render via the portlet adapter; empty string if it is missing."""
        try:
            return self.portlet_adapter.render(context, request, view, **kw)
        except ComponentLookupError:
            _logger.error("portlet %r not found for context %r"
                          % (self.portlet_type, context))
            return ""
class XMPPPasswordStorage(Persistent):
    """Persistent storage mapping user ids to generated XMPP passwords."""

    implements(IXMPPPasswordStorage)

    def __init__(self):
        self._passwords = OOBTree()

    def get(self, user_id):
        """Return the stored password for user_id, or None if unknown."""
        if user_id in self._passwords:
            return self._passwords[user_id]
        return None

    def set(self, user_id):
        """Generate, store and return a new 12-character password.

        SECURITY FIX: the plain `random` module is a predictable PRNG and
        must not be used to generate passwords; `random.SystemRandom`
        draws from the OS entropy source instead.
        """
        rng = random.SystemRandom()
        password = ''.join([rng.choice(chars) for i in range(12)])
        self._passwords[user_id] = password
        return password

    def remove(self, user_id):
        """Delete the password for user_id, if one is stored."""
        if user_id in self._passwords:
            del self._passwords[user_id]

    def clear(self):
        """Remove all stored passwords."""
        self._passwords.clear()

#class PubSubStorage(object):
#
#    implements(IPubSubStorage)
#
#    def __init__(self):
#        self.items = dict()
#        self.node_items = dict()
#        self.collections = dict()
#        self.leaf_nodes = []
#        self.publishers = dict()
#        self.comments = dict()
#
#    def itemsFromNodes(self, nodes, start=0, count=20):
#        if not isinstance(nodes, list):
#            nodes = [nodes]
#        all_items = [self.node_items[node]
#                     for node in nodes
#                     if node in self.node_items]
#        ids = sorted(itertools.chain(*all_items),
#                     key=lambda item_id: self.items[item_id]['updated'],
#                     reverse=True)
#        return [self.items[item_id] for item_id in ids[start:count + start]]
#
#    def getItemById(self, item_id):
#        return self.items.get(item_id)
#
#    def getNodeByItemId(self, item_id):
#        for node in self.leaf_nodes:
#            if item_id in self.node_items[node]:
#                return node
#
#    def getCommentsForItemId(self, item_id):
#        if item_id not in self.comments:
#            return []
#        return [self.items[iid] for iid in self.comments[item_id]]
class Portlet(Persistent):
    """Persistent portlet content object.

    Rendering and schema are delegated to an ``IPortletType`` adapter
    named after ``portlet_type``.
    """

    __name__ = None
    __parent__ = None
    type_name = u"Portlet"
    type_title = _(u"Portlet")
    type_description = _(u"A mini view rendered ")
    portlet_type = u""
    add_permission = "Add %s" % type_name

    def __init__(self, portlet_type, **kw):
        self.uid = unicode(uuid4())
        self.portlet_type = portlet_type
        self.__settings__ = OOBTree()
        settings = kw.pop('settings', {})
        self.settings = settings
        self.__dict__.update(**kw)
        super(Portlet, self).__init__()

    @property
    def title(self):
        return self.settings.get(
            'title', getattr(self.portlet_adapter, 'title', u''))

    @property
    def description(self):
        return self.settings.get(
            'description', getattr(self.portlet_adapter, 'description', u''))

    @property
    def settings(self):
        return self.__settings__

    @settings.setter
    def settings(self, value):
        storage = self.__settings__
        storage.clear()
        storage.update(value)

    @property
    def schema_factory(self):
        return self.portlet_adapter.schema_factory

    @property
    def portlet_adapter(self):
        # Looked up fresh on every access; never cached on the instance.
        reg = get_current_registry()
        return reg.getAdapter(self, IPortletType, name=self.portlet_type)

    @property
    def slot(self):
        # Delegate to the parent; None when detached or parent has no slot.
        return getattr(self.__parent__, 'slot', None)

    def render(self, context, request, view, **kw):
        """Render this portlet, or return '' when its adapter is missing."""
        try:
            return self.portlet_adapter.render(context, request, view, **kw)
        except ComponentLookupError:
            msg = "portlet %r not found for context %r" % (self.portlet_type,
                                                           context)
            _logger.error(msg)
            return ""
class UpgradeRegistry(object):
    """Registry of upgrade steps, by profile.

    Registry keys are profile ids.

    Each registry value is a nested mapping:
      - id -> step for single steps
      - id -> [ (id1, step1), (id2, step2) ] for nested steps
    """

    def __init__(self):
        self._registry = OOBTree()

    def __getitem__(self, key):
        # NOTE: returns None for missing keys rather than raising KeyError.
        return self._registry.get(key)

    def keys(self):
        return self._registry.keys()

    def clear(self):
        self._registry.clear()

    def getUpgradeStepsForProfile(self, profile_id):
        """Return the upgrade steps mapping for a given profile, creating
        (and registering) an empty mapping if none exists yet.
        """
        profile_steps = self._registry.get(profile_id)
        if profile_steps is None:
            # Create on demand; avoids the redundant second lookup the
            # previous implementation performed after inserting.
            profile_steps = OOBTree()
            self._registry[profile_id] = profile_steps
        return profile_steps

    def getUpgradeStep(self, profile_id, step_id):
        """Return the specified upgrade step for the specified profile,
        or None if it doesn't exist.
        """
        profile_steps = self._registry.get(profile_id)
        if profile_steps is None:
            return None
        step = profile_steps.get(step_id)
        if step is None:
            # Not found directly; search the nested [(id, step), ...] lists.
            for key in profile_steps.keys():
                value = profile_steps[key]
                if isinstance(value, list):
                    step = dict(value).get(step_id)
                    if step is not None:
                        break
        elif isinstance(step, list):
            # The id mapped directly to a nested list; unwrap it.
            step = dict(step).get(step_id)
        return step
class UsedPasswordStorage(Persistent):
    """A local utility for storing a list of previously used passwords.

    Only password hashes are persisted (via AuthEncoding); plaintext
    passwords are never stored -- and, after this fix, never logged.
    """

    implements(IUsedPasswordStorage)

    def __init__(self):
        self._user_passwords = OOBTree()

    def isPasswordUsed(self, login, password, history_size=0):
        """Query password store to see if password has been previously used.
        """
        for pw_hash in self.getPasswordsForUser(login, history_size):
            if AuthEncoding.pw_validate(pw_hash, password):
                # SECURITY FIX: never write the plaintext password to the
                # log; log only the login it belongs to.
                log.info("Password for user '%s' not valid (already used)"
                         % login)
                return True
        log.info("Password for user '%s' valid" % login)
        return False

    def getPasswordsForUser(self, login, history_size=0):
        """Return a list of previously used password hashes for a user.

        history_size limits the result to the most recent N entries; the
        default 0 returns the full history ([-0:] slices the whole list).
        """
        hashes = self._user_passwords.get(login, [])[-history_size:]
        return hashes

    def setPasswordForUser(self, login, password):
        """Add a password hash to the used-password history for a user."""
        hashes = self._user_passwords.get(login, [])
        hashes.append(AuthEncoding.pw_encrypt(password))
        # Reassign so the persistent BTree registers the mutation.
        self._user_passwords[login] = hashes
        log.info("Password for user '%s' stored" % login)

    def clearPasswordsForUser(self, login):
        """Remove stored passwords for a user."""
        del self._user_passwords[login]

    def clearAllPasswords(self):
        """Remove stored passwords for all users."""
        self._user_passwords.clear()
class SharedIndex(Persistent, Contained):
    """Catalog index storing per-link shared values keyed by (uid, key)."""

    implements(zope.catalog.interfaces.ICatalogIndex)

    def __init__(self):
        Persistent.__init__(self)
        Contained.__init__(self)
        self.uids = OOBTree()   # docid -> shared uid of the link
        self.data = OOBTree()   # (uid, key) -> shared value

    def get(self, docid, key):
        """Return the value stored for (docid, key), or None."""
        uid = self.uids.get(docid)
        if uid is None:
            return None
        return self.data.get((uid, key))

    def __contains__(self, docid_key):
        docid, key = docid_key
        if docid not in self.uids:
            return False
        return (self.uids[docid], key) in self.data

    def index_doc(self, docid, link):
        """Record all of the link's shared key/value pairs."""
        self.uids[docid] = get_link_shared_uid(link)
        for key, value in link.shared.items():
            self.data[self.uids[docid], key] = value

    def unindex_doc(self, docid):
        if docid not in self.uids:
            return
        unindex_uid = self.uids[docid]
        # Collect first: deleting from a BTree while iterating it is unsafe.
        tounindex = set()
        for uid, key in self.data:
            if uid == unindex_uid:
                tounindex.add((uid, key))
        for idx in tounindex:
            del self.data[idx]
        # NOTE(review): the docid -> uid mapping is intentionally left in
        # place, matching the original behavior -- confirm this is desired.

    def clear(self):
        self.data.clear()
        self.uids.clear()

    def apply(self, query):
        # BUG FIX: 'self' was missing from the signature, and
        # NotImplemented is not an exception type (calling it raises a
        # TypeError); NotImplementedError is the correct exception.
        raise NotImplementedError('querying this index is not supported')
class Root(Content, LocalRolesMixin, DCMetadataMixin, ContextACLMixin):
    """Persistent site root: owns the catalog, the document map and the
    site-wide settings storage."""

    type_name = "Root"
    type_title = _("Site root")
    add_permission = "Add %s" % type_name
    search_visible = False
    is_permanent = True

    def __init__(self, data=None, **kwargs):
        self.catalog = Catalog()
        self.document_map = DocumentMap()
        # The catalog must exist before it can be populated.
        populate_catalog(self.catalog)
        self.__site_settings__ = OOBTree()
        super(Root, self).__init__(data=data, **kwargs)

    @property
    def site_settings(self):
        # getattr guards against instances persisted before the settings
        # attribute was introduced.
        return getattr(self, "__site_settings__", {})

    @site_settings.setter
    def site_settings(self, value):
        storage = self.__site_settings__
        storage.clear()
        storage.update(value)
class Root(Content, LocalRolesMixin, DCMetadataMixin, ContextACLMixin):
    """Site root content type holding catalog, document map and settings."""

    type_name = u"Root"
    type_title = _(u"Site root")
    add_permission = "Add %s" % type_name
    search_visible = False
    is_permanent = True

    def __init__(self, data=None, **kwargs):
        # DocumentMap and Catalog are independent; populate the catalog
        # once it exists.
        self.document_map = DocumentMap()
        self.catalog = Catalog()
        populate_catalog(self.catalog)
        self.__site_settings__ = OOBTree()
        super(Root, self).__init__(data=data, **kwargs)

    @property
    def site_settings(self):
        # Fall back to an empty mapping for pre-settings instances.
        return getattr(self, '__site_settings__', {})

    @site_settings.setter
    def site_settings(self, value):
        # Mutate the persistent BTree in place instead of rebinding it.
        self.__site_settings__.clear()
        self.__site_settings__.update(value)
class XMPPPasswordStorage(Persistent):
    """Persistent storage of generated XMPP passwords keyed by user id."""

    implements(IXMPPPasswordStorage)

    def __init__(self):
        self._passwords = OOBTree()

    def get(self, user_id):
        """Return the stored password for user_id, or None if unknown."""
        if user_id in self._passwords:
            return self._passwords[user_id]
        return None

    def set(self, user_id):
        """Generate, store and return a new 12-character password.

        SECURITY FIX: use random.SystemRandom (OS entropy) instead of the
        predictable default PRNG for password generation.
        """
        rng = random.SystemRandom()
        password = ''.join([rng.choice(chars) for i in range(12)])
        self._passwords[user_id] = password
        return password

    def remove(self, user_id):
        """Delete the password for user_id, if one is stored."""
        if user_id in self._passwords:
            del self._passwords[user_id]

    def clear(self):
        """Remove all stored passwords."""
        self._passwords.clear()
class XMPPPasswordStorage(Persistent):
    """Persistent storage of generated XMPP passwords keyed by user id."""

    implements(IXMPPPasswordStorage)

    def __init__(self):
        self._passwords = OOBTree()

    def get(self, user_id):
        """Return the stored password for user_id, or None if unknown."""
        if user_id in self._passwords:
            return self._passwords[user_id]
        return None

    def set(self, user_id):
        """Generate, store and return a new 12-character password.

        SECURITY FIX: random.choice on the default PRNG is predictable;
        random.SystemRandom draws from the OS entropy source instead.
        """
        rng = random.SystemRandom()
        password = ''.join([rng.choice(chars) for i in range(12)])
        self._passwords[user_id] = password
        return password

    def remove(self, user_id):
        """Delete the password for user_id, if one is stored."""
        if user_id in self._passwords:
            del self._passwords[user_id]

    def clear(self):
        """Remove all stored passwords and log that the store was wiped."""
        self._passwords.clear()
        logger.warning("The password storage has been wiped.")
class fsIndex(object):
    """Index mapping 8-byte keys (oids) to integer file positions.

    Keys are split in two: the 6-byte prefix keys an OOBTree whose values
    are fsBuckets, keyed by the remaining 2-byte suffix.  Position values
    are packed to strings with num2str and unpacked with str2num.
    """

    def __init__(self, data=None):
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # Versioned pickle format; buckets are serialized to strings.
        return dict(state_version=1,
                    _data=[(k, v.toString())
                           for (k, v) in self._data.iteritems()])

    def __setstate__(self, state):
        # Dispatch on the pickled format version.
        version = state.pop("state_version", 0)
        getattr(self, "_setstate_%s" % version)(state)

    def _setstate_0(self, state):
        # Legacy (version 0) pickles stored the raw instance __dict__.
        self.__dict__.clear()
        self.__dict__.update(state)

    def _setstate_1(self, state):
        self._data = OOBTree([(k, fsBucket().fromString(v))
                              for (k, v) in state["_data"]])

    def __getitem__(self, key):
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        """Write pos followed by all (key, packed bucket) pairs to fname,
        terminated by a pickled None."""
        with open(fname, "wb") as f:
            pickler = cPickle.Pickler(f, 1)
            pickler.fast = True
            pickler.dump(pos)
            for k, v in self._data.iteritems():
                pickler.dump((k, v.toString()))
            pickler.dump(None)

    @classmethod
    def load(class_, fname):
        """Read an index written by save(); return {'pos': ..., 'index': ...}.

        Old-format files (whose first object is not an int) are returned
        as-is for the caller to convert.
        """
        with open(fname, "rb") as f:
            unpickler = cPickle.Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, (int, long)):
                return pos  # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    break
                k, v = v
                data[k] = fsBucket().fromString(v)
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # BUG FIX: "raise KeyError, key" is Python-2-only syntax (a
            # SyntaxError on Python 3); the call form is identical in both.
            raise KeyError(key)
        del tree[key[6:]]
        if not tree:
            # Drop emptied buckets so nothing in _data.values() is empty.
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # Use self as a sentinel so stored falsy values still count.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty?  __delitem__ removes a
    # bucket as soon as it becomes empty, so nothing in _data.values()
    # is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey()
    # methods are very efficient.

    def minKey(self, key=None):
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])
        tree = self._data[smallest_prefix]
        assert tree
        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()
        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])
        tree = self._data[biggest_prefix]
        assert tree
        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()
        return biggest_prefix + biggest_suffix
class LinkSet(Persistent, Contained):
    """Set of links.

    This class is used internally to represent relationships.  Initially
    it is empty

        >>> linkset = LinkSet()
        >>> list(linkset)
        []

    You can add new links to it

        >>> from schooltool.relationship.tests import URIStub
        >>> link1 = Link('example:Group', object(), URIStub('example:Member'),
        ...              URIStub('example:Membership'))
        >>> link2 = Link('example:Friend', object(), URIStub('example:Friend'),
        ...              URIStub('example:Friendship'))
        >>> linkset.add(link1)
        >>> linkset.add(link2)

    The links have landed in the cache too:

        >>> expected = {
        ...     'example:Member': [link1],
        ...     'example:Friend': [link2]}
        >>> dict(linkset._byrole.items()) == expected
        True

    Let's zap the cache and call getCachedLinksByRole(), which should
    restore it:

        >>> del linkset._byrole
        >>> linkset.getCachedLinksByRole(URIStub('something'))
        []
        >>> dict(linkset._byrole.items()) == expected
        True

    Links should get named:

        >>> link1.__name__
        '1'
        >>> link2.__name__
        '2'

    We can access our links through their names:

        >>> linkset['1'] is link1
        True
        >>> linkset['2'] is link2
        True

    And get parents set:

        >>> link1.__parent__ is linkset
        True

    We got them in the container now:

        >>> set(linkset) == set([link1, link2])  # order is not preserved
        True

    You can look for links for a specific relationship

        >>> linkset.find('example:Group',
        ...              link1.target,
        ...              URIStub('example:Member'),
        ...              URIStub('example:Membership')) is link1
        True

    We can't add same link into the container twice:

        >>> linkset.add(link1)  # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...

    If find fails, it raises ValueError, just like list.index.

        >>> linkset.find('example:Member', link1.target,
        ...              URIStub('example:Group'),
        ...              URIStub('example:Membership'))  # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...

    You can remove links

        >>> linkset.remove(link2)
        >>> set(linkset) == set([link1])
        True

    The links are removed from the cache too:

        >>> list(linkset._byrole.keys())
        ['example:Member']

    If you try to remove a link that is not in the set, you will get a
    ValueError.

        >>> linkset.remove(link2)  # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: ...

    You can remove all links

        >>> linkset.clear()
        >>> set(linkset) == set([])
        True

    The cache has been cleared too:

        >>> len(linkset._byrole)
        0

    The class is documented in IRelationshipLinks

        >>> from zope.interface.verify import verifyObject
        >>> verifyObject(IRelationshipLinks, linkset)
        True
    """

    implements(IRelationshipLinks)

    # Class-level default; instances get an IFBTree.TreeSet in __init__.
    _lids = None

    def __init__(self):
        self._lids = IFBTree.TreeSet()
        self._links = OOBTree()

    @property
    def catalog(self):
        return getLinkCatalog()

    def getCachedLinksByRole(self, role, catalog=None):
        """Get a set of links by role."""
        if catalog is None:
            catalog = self.catalog
        lids = self.query(role=role, catalog=catalog)
        return [CLink(catalog, lid) for lid in lids]

    def getCachedLinksByTarget(self, target, catalog=None):
        # Same pattern as getCachedLinksByRole, but keyed on the target.
        if catalog is None:
            catalog = self.catalog
        lids = self.query(target=target, catalog=catalog)
        return [CLink(catalog, lid) for lid in lids]

    def add(self, link):
        if link.__parent__ == self:
            raise ValueError("You are adding same link twice.")
        # Find the first free numeric name for the link.
        i = 1
        while "%s" % i in self._links:
            i += 1
        link.__name__ = "%s" % i
        self._links[link.__name__] = link
        link.__parent__ = self
        notify(ObjectAddedEvent(link, self._links, link.__name__))

    def remove(self, link):
        # Identity check: only remove the exact link object stored here.
        if link is self._links.get(link.__name__):
            link_name = link.__name__
            self._lids.remove(getUtility(IIntIds).getId(link))
            del self._links[link.__name__]
            notify(ObjectRemovedEvent(link, self._links, link_name))
        else:
            raise ValueError("This link does not belong to this container!")

    def clear(self):
        # Snapshot items first; notify after the container is emptied.
        # NOTE(review): assumes the _byrole cache attribute exists here --
        # confirm it is always (re)created before clear() is called.
        deleted = list(self._links.items())
        self._links.clear()
        self._byrole.clear()
        for name, link in deleted:
            notify(ObjectRemovedEvent(link, self._links, name))

    def __iter__(self):
        return iter(self._links.values())

    def find(self, my_role, target, role, rel_type):
        """Return the link matching the relationship, else raise ValueError.

        Roles and rel_type are compared by hash; target by identity.
        """
        for link in self._links.values():
            if (link.role_hash == hash(role)
                and link.target is target
                and link.rel_type_hash == hash(rel_type)
                and link.my_role_hash == hash(my_role)):
                return link
        else:
            raise ValueError(my_role, target, role, rel_type)

    def __getitem__(self, id):
        return self._links[id]

    def get(self, key, default=None):
        return self._links.get(key, default)

    def query(self, my_role=None, target=None, role=None, rel_type=None,
              catalog=None):
        """Intersect catalog id sets for each given criterion.

        Returns an IFBTree set of link ids; short-circuits to the empty
        result as soon as any intersection comes up empty.
        """
        if catalog is None:
            catalog = self.catalog
        empty = IFBTree.TreeSet()
        this_hash = hash_persistent(self.__parent__)
        result = None
        if my_role is not None:
            ids = catalog['my_role_hash'].values_to_documents.get(
                (hash(my_role), this_hash), empty)
            if result is None:
                result = ids
            else:
                result = IFBTree.intersection(result, ids)
            if not result:
                return result
        if target is not None:
            ids = catalog['target'].values_to_documents.get(
                (IKeyReference(target), this_hash), empty)
            if result is None:
                result = ids
            else:
                result = IFBTree.intersection(result, ids)
            if not result:
                return result
        if role is not None:
            ids = catalog['role_hash'].values_to_documents.get(
                (hash(role), this_hash), empty)
            if result is None:
                result = ids
            else:
                result = IFBTree.intersection(result, ids)
            if not result:
                return result
        if rel_type is not None:
            ids = catalog['rel_type_hash'].values_to_documents.get(
                (hash(rel_type), this_hash), empty)
            if result is None:
                result = ids
            else:
                result = IFBTree.intersection(result, ids)
        return result

    def iterLinksByRole(self, role, rel_type=None, catalog=None):
        """Yield links with the given role, optionally narrowed by rel_type.

        Each link is passed through its relationship type's filter; when
        rel_type is not given, filters are looked up per rel_type hash and
        memoized for the duration of the iteration.
        """
        if catalog is None:
            catalog = self.catalog
        lids = self.query(role=role, rel_type=rel_type, catalog=catalog)
        if rel_type is None:
            filters = {}
            for lid in lids:
                link = CLink(catalog, lid)
                if link.rel_type_hash not in filters:
                    filters[link.rel_type_hash] = link.rel_type.filter
                if filters[link.rel_type_hash](link):
                    yield link
        else:
            filter = rel_type.filter
            for lid in lids:
                link = CLink(catalog, lid)
                if filter(link):
                    yield link

    def getTargetsByRole(self, role, rel_type=None, catalog=None):
        links = self.iterLinksByRole(role, rel_type=rel_type, catalog=catalog)
        return [link.target for link in links]

    def iterTargetsByRole(self, role, rel_type=None, catalog=None):
        for link in self.iterLinksByRole(role, rel_type=rel_type,
                                         catalog=catalog):
            yield link.target
class fsIndex(object):
    # Two-level index mapping 8-byte keys (oids) to integer file
    # positions: the 6-byte prefix keys an OOBTree of fsBuckets, which
    # are in turn keyed by the 2-byte suffix.  Values are packed strings
    # (num2str/str2num).  Keys must be bytes.

    def __init__(self, data=None):
        self._data = OOBTree()
        if data:
            self.update(data)

    def __getstate__(self):
        # Versioned pickle format; buckets are serialized to strings.
        return dict(
            state_version = 1,
            _data = [(k, v.toString())
                     for (k, v) in six.iteritems(self._data)
                     ]
            )

    def __setstate__(self, state):
        # Dispatch to _setstate_<version>.
        version = state.pop('state_version', 0)
        getattr(self, '_setstate_%s' % version)(state)

    def _setstate_0(self, state):
        # Legacy (version 0) pickles stored the raw instance __dict__;
        # keys may have arrived as text on Python 3, so re-encode them.
        self.__dict__.clear()
        self.__dict__.update(state)
        self._data = OOBTree([
            (ensure_bytes(k), v)
            for (k, v) in self._data.items()
        ])

    def _setstate_1(self, state):
        self._data = OOBTree([
            (ensure_bytes(k), fsBucket().fromString(ensure_bytes(v)))
            for (k, v) in state['_data']
        ])

    def __getitem__(self, key):
        assert isinstance(key, bytes)
        return str2num(self._data[key[:6]][key[6:]])

    def save(self, pos, fname):
        # Stream out pos then (key, packed bucket) pairs, terminated by
        # a pickled None.
        with open(fname, 'wb') as f:
            pickler = Pickler(f, _protocol)
            pickler.fast = True
            pickler.dump(pos)
            for k, v in six.iteritems(self._data):
                pickler.dump((k, v.toString()))
            pickler.dump(None)

    @classmethod
    def load(class_, fname):
        # Read an index written by save(); returns {'pos': ..., 'index': ...}.
        with open(fname, 'rb') as f:
            unpickler = Unpickler(f)
            pos = unpickler.load()
            if not isinstance(pos, INT_TYPES):
                # NB: this might contain OIDs that got unpickled
                # into Unicode strings on Python 3; hope the caller
                # will pipe the result to fsIndex().update() to normalize
                # the keys
                return pos                  # Old format
            index = class_()
            data = index._data
            while 1:
                v = unpickler.load()
                if not v:
                    break
                k, v = v
                data[ensure_bytes(k)] = fsBucket().fromString(ensure_bytes(v))
            return dict(pos=pos, index=index)

    def get(self, key, default=None):
        assert isinstance(key, bytes)
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        assert isinstance(key, bytes)
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        assert isinstance(key, bytes)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            raise KeyError(key)
        del tree[key[6:]]
        if not tree:
            # Drop emptied buckets so minKey/maxKey never see an empty tree.
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in six.itervalues(self._data):
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[ensure_bytes(k)] = v

    def has_key(self, key):
        # Use self as a sentinel so stored falsy values still count.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        assert isinstance(key, bytes)
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, tree in six.iteritems(self._data):
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in six.iteritems(self._data):
            for suffix, value in six.iteritems(tree):
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in six.itervalues(self._data):
            for value in six.itervalues(tree):
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty?  __delitem__ above drops
    # a bucket as soon as it becomes empty, so nothing in _data.values()
    # is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey()
    # methods are very efficient.

    def minKey(self, key=None):
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])
        tree = self._data[smallest_prefix]
        assert tree
        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()
        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])
        tree = self._data[biggest_prefix]
        assert tree
        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()
        return biggest_prefix + biggest_suffix
class fsIndex(object):
    """Two-level oid -> file position index.

    An 8-byte key is split into a 6-byte prefix (OOBTree key) and a
    2-byte suffix (fsBucket key); values are strings packed by num2str
    and unpacked by str2num.
    """

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        prefix, suffix = key[:6], key[6:]
        return str2num(self._data[prefix][suffix])

    def get(self, key, default=None):
        bucket = self._data.get(key[:6], default)
        if bucket is default:
            return default
        raw = bucket.get(key[6:], default)
        if raw is default:
            return default
        return str2num(raw)

    def __setitem__(self, key, value):
        encoded = num2str(value)
        prefix = key[:6]
        bucket = self._data.get(prefix)
        if bucket is None:
            # First entry for this prefix: create its bucket.
            bucket = fsBucket()
            self._data[prefix] = bucket
        bucket[key[6:]] = encoded

    def __len__(self):
        return sum(len(bucket) for bucket in self._data.itervalues())

    def update(self, mapping):
        for key, value in mapping.items():
            self[key] = value

    def has_key(self, key):
        # Sentinel-based membership test so stored falsy values count.
        marker = self
        return self.get(key, marker) is not marker

    def __contains__(self, key):
        bucket = self._data.get(key[:6])
        if bucket is None:
            return False
        return bucket.get(key[6:], None) is not None

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, bucket in self._data.iteritems():
            for suffix in bucket:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, bucket in self._data.iteritems():
            for suffix, raw in bucket.iteritems():
                yield (prefix + suffix, str2num(raw))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for bucket in self._data.itervalues():
            for raw in bucket.itervalues():
                yield str2num(raw)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `bucket` is actually empty? We're relying here on
    # that this class doesn't implement __delitem__: once a key gets
    # into an fsIndex, the only way it can go away is by invoking
    # clear(). Therefore nothing in _data.values() is ever empty.
    #
    # Note that because `bucket` is an fsBTree, its minKey()/maxKey()
    # methods are very efficient.

    def minKey(self, key=None):
        if key is None:
            prefix = self._data.minKey()
        else:
            prefix = self._data.minKey(key[:6])
        bucket = self._data[prefix]
        assert bucket
        if key is None:
            suffix = bucket.minKey()
        else:
            try:
                suffix = bucket.minKey(key[6:])
            except ValueError:
                # No suffix >= the requested one in this bucket; advance
                # to the next prefix and take its smallest entry.
                prefix = self._data.minKey(prefix_plus_one(prefix))
                bucket = self._data[prefix]
                assert bucket
                suffix = bucket.minKey()
        return prefix + suffix

    def maxKey(self, key=None):
        if key is None:
            prefix = self._data.maxKey()
        else:
            prefix = self._data.maxKey(key[:6])
        bucket = self._data[prefix]
        assert bucket
        if key is None:
            suffix = bucket.maxKey()
        else:
            try:
                suffix = bucket.maxKey(key[6:])
            except ValueError:
                # No suffix <= the requested one in this bucket; step back
                # to the previous prefix and take its largest entry.
                prefix = self._data.maxKey(prefix_minus_one(prefix))
                bucket = self._data[prefix]
                assert bucket
                suffix = bucket.maxKey()
        return prefix + suffix
class fsIndex(object):
    """Index mapping 8-byte keys (oids) to integer file positions.

    The key's 6-byte prefix selects an fsBucket inside an OOBTree; the
    2-byte suffix keys the bucket.  Values are packed with num2str and
    unpacked with str2num.
    """

    def __init__(self):
        self._data = OOBTree()

    def __getitem__(self, key):
        return str2num(self._data[key[:6]][key[6:]])

    def get(self, key, default=None):
        tree = self._data.get(key[:6], default)
        if tree is default:
            return default
        v = tree.get(key[6:], default)
        if v is default:
            return default
        return str2num(v)

    def __setitem__(self, key, value):
        value = num2str(value)
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            tree = fsBucket()
            self._data[treekey] = tree
        tree[key[6:]] = value

    def __delitem__(self, key):
        treekey = key[:6]
        tree = self._data.get(treekey)
        if tree is None:
            # BUG FIX: "raise KeyError, key" is Python-2-only syntax (a
            # SyntaxError on Python 3); the call form behaves identically.
            raise KeyError(key)
        del tree[key[6:]]
        if not tree:
            # Drop emptied buckets so nothing in _data.values() is empty.
            del self._data[treekey]

    def __len__(self):
        r = 0
        for tree in self._data.itervalues():
            r += len(tree)
        return r

    def update(self, mapping):
        for k, v in mapping.items():
            self[k] = v

    def has_key(self, key):
        # Use self as a sentinel so stored falsy values still count.
        v = self.get(key, self)
        return v is not self

    def __contains__(self, key):
        tree = self._data.get(key[:6])
        if tree is None:
            return False
        v = tree.get(key[6:], None)
        if v is None:
            return False
        return True

    def clear(self):
        self._data.clear()

    def __iter__(self):
        for prefix, tree in self._data.iteritems():
            for suffix in tree:
                yield prefix + suffix

    iterkeys = __iter__

    def keys(self):
        return list(self.iterkeys())

    def iteritems(self):
        for prefix, tree in self._data.iteritems():
            for suffix, value in tree.iteritems():
                yield (prefix + suffix, str2num(value))

    def items(self):
        return list(self.iteritems())

    def itervalues(self):
        for tree in self._data.itervalues():
            for value in tree.itervalues():
                yield str2num(value)

    def values(self):
        return list(self.itervalues())

    # Comment below applies for the following minKey and maxKey methods
    #
    # Obscure: what if `tree` is actually empty?  __delitem__ removes a
    # bucket as soon as it becomes empty, so nothing in _data.values()
    # is ever empty.
    #
    # Note that because `tree` is an fsBTree, its minKey()/maxKey()
    # methods are very efficient.

    def minKey(self, key=None):
        if key is None:
            smallest_prefix = self._data.minKey()
        else:
            smallest_prefix = self._data.minKey(key[:6])
        tree = self._data[smallest_prefix]
        assert tree
        if key is None:
            smallest_suffix = tree.minKey()
        else:
            try:
                smallest_suffix = tree.minKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix >= arg)
                next_prefix = prefix_plus_one(smallest_prefix)
                smallest_prefix = self._data.minKey(next_prefix)
                tree = self._data[smallest_prefix]
                assert tree
                smallest_suffix = tree.minKey()
        return smallest_prefix + smallest_suffix

    def maxKey(self, key=None):
        if key is None:
            biggest_prefix = self._data.maxKey()
        else:
            biggest_prefix = self._data.maxKey(key[:6])
        tree = self._data[biggest_prefix]
        assert tree
        if key is None:
            biggest_suffix = tree.maxKey()
        else:
            try:
                biggest_suffix = tree.maxKey(key[6:])
            except ValueError:  # 'empty tree' (no suffix <= arg)
                next_prefix = prefix_minus_one(biggest_prefix)
                biggest_prefix = self._data.maxKey(next_prefix)
                tree = self._data[biggest_prefix]
                assert tree
                biggest_suffix = tree.maxKey()
        return biggest_prefix + biggest_suffix
class _Records(object):
    """The records stored in the registry.

    This implements dict-like access to records, where as the Registry
    object implements dict-like read-only access to values.

    Internally a record name maps to two parallel BTrees: ``_fields``
    (the persistent field, or a string pointer naming another record)
    and ``_values`` (the stored value).  Record objects handed out by
    ``__getitem__`` are re-assembled on the fly from those two trees.
    """

    __parent__ = None

    # Similar to zope.schema._field._isdotted, but allows up to one '/'
    _validkey = re.compile(r"([a-zA-Z][a-zA-Z0-9_-]*)"
                           r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
                           r"([/][a-zA-Z][a-zA-Z0-9_-]*)?"
                           r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
                           # use the whole line
                           r"$").match

    def __init__(self, parent):
        self.__parent__ = parent
        self._fields = OOBTree()  # name -> IPersistentField or str pointer
        self._values = OOBTree()  # name -> stored value

    def __setitem__(self, name, record):
        """Store *record* under *name* and emit RecordAddedEvent.

        Raises InvalidRegistryKey for a malformed name and ValueError
        when *record* does not provide IRecord.
        """
        if not self._validkey(name):
            # BUG FIX: report the offending key; the original passed the
            # record object, which made the error message useless.
            raise InvalidRegistryKey(name)
        if not IRecord.providedBy(record):
            raise ValueError("Value must be a record")

        self._setField(name, record.field)
        self._values[name] = record.value

        # Bind the record into the registry tree before notifying.
        record.__name__ = name
        record.__parent__ = self.__parent__

        notify(RecordAddedEvent(record))

    def __delitem__(self, name):
        record = self[name]
        # unbind the record so that it won't attempt to look up values from
        # the registry anymore
        record.__parent__ = None
        del self._fields[name]
        del self._values[name]
        notify(RecordRemovedEvent(record))

    def __getitem__(self, name):
        """Rebuild and return a bound Record for *name* (KeyError if absent)."""
        field = self._getField(name)
        value = self._values[name]
        record = Record(field, value, _validate=False)
        record.__name__ = name
        record.__parent__ = self.__parent__
        return record

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def __nonzero__(self):
        return self._values.__nonzero__()

    def __len__(self):
        return self._values.__len__()

    def __iter__(self):
        return self._values.__iter__()

    def has_key(self, name):
        return self._values.__contains__(name)

    def __contains__(self, name):
        return self._values.__contains__(name)

    def keys(self, min=None, max=None):
        return self._values.keys(min, max)

    def maxKey(self, key=None):
        return self._values.maxKey(key)

    def minKey(self, key=None):
        return self._values.minKey(key)

    def values(self, min=None, max=None):
        return [self[name] for name in self.keys(min, max)]

    def items(self, min=None, max=None):
        return [(name, self[name],) for name in self.keys(min, max)]

    def setdefault(self, key, value):
        if key not in self:
            self[key] = value
        return self[key]

    def clear(self):
        self._fields.clear()
        self._values.clear()

    # Helper methods

    def _getField(self, name):
        """Resolve the field for *name*, following string pointers.

        A chain of pointers is walked to the concrete field, which is then
        wrapped in a FieldRef naming the final target record.
        """
        field = self._fields[name]

        # Handle field reference pointers
        if isinstance(field, basestring):
            recordName = field
            while isinstance(field, basestring):
                recordName = field
                field = self._fields[recordName]
            field = FieldRef(recordName, field)

        return field

    def _setField(self, name, field):
        """Store *field* for *name*; field refs are stored as pointers."""
        if not IPersistentField.providedBy(field):
            raise ValueError("The record's field must be an IPersistentField.")
        if IFieldRef.providedBy(field):
            if field.recordName not in self._fields:
                raise ValueError(
                    "Field reference points to non-existent record")
            self._fields[name] = field.recordName  # a pointer, of sorts
        else:
            field.__name__ = 'value'
            self._fields[name] = field
class channel_state:
    """Mutable per-channel playback state.

    Holds the channel id, a tree of mix options, and a song queue with a
    cursor (``song_queue_pos``) pointing at the currently playing entry.
    Both trees are integer-keyed; new keys are allocated as maxKey() + 1.
    """

    def __init__(self):
        self.id = 0
        self.mix_options = OOBTree()
        self.song_queue = OOBTree()
        self.song_queue_pos = 0
        # Declared for readers/type-checkers only; assigned elsewhere.
        # Quoted so the class can be defined without StreamPlayer in scope.
        self.player: "StreamPlayer"

    def set_id(self, id: int):
        """Set the channel id."""
        self.id = id

    def add_option(self, option: "mix_option"):
        """Store *option* under the next free index and return that index."""
        try:
            option_index = self.mix_options.maxKey() + 1
        except ValueError:  # tree is empty
            option_index = 0
        # BUG FIX: tag the instance that was passed in.  The original wrote
        # ``mix_option.id = option_index``, which set one shared attribute
        # on the *class*, so individual options never got their own id.
        option.id = option_index
        print('inserting option: ' + str(option_index))
        self.mix_options.insert(option_index, option)
        return option_index

    def clear_options(self):
        """Remove every mix option."""
        self.mix_options.clear()

    def empty_queue(self):
        """Drop all queued songs and rewind the cursor."""
        self.song_queue.clear()
        self.reset_queue_pos()

    def add_queue(self, song: "Song"):
        """Append *song* at the next free index and return that index."""
        try:
            song_index = self.song_queue.maxKey() + 1
        except ValueError:  # tree is empty
            song_index = 0
        print('inserting song: ' + str(song_index))
        self.song_queue.insert(song_index, song)
        return song_index

    def current_queue_item(self):
        """Return the song the cursor currently points at."""
        return self.song_queue[self.song_queue_pos]

    def increment_queue_pos(self):
        self.song_queue_pos = self.song_queue_pos + 1

    def reset_queue_pos(self):
        self.song_queue_pos = 0

    def has_next_song(self):
        """Return True when at least one queued song follows the cursor."""
        return self.song_queue.maxKey() > self.song_queue_pos

    def play_next(self):
        """Advance to and start the next song, or empty the queue at the end."""
        if self.current_queue_item().player.error:
            print("bad stuff happened.")
            print(self.current_queue_item().player.error)
        if self.has_next_song():
            self.increment_queue_pos()
            next_song = self.current_queue_item()
            next_player: "StreamPlayer" = next_song.player
            next_player.start()
        else:
            self.empty_queue()

    def play_now(self):
        """Start the current song's player unless it is already running."""
        current_player: "StreamPlayer" = self.current_queue_item().player
        print('playing : ' + str(self.song_queue_pos) + ' ' +
              str(current_player.is_alive()))
        if not current_player.is_alive():
            print('starting : ' + str(self.song_queue_pos))
            current_player.start()

    def pause(self):
        """Pause the current song's player."""
        current_player: "StreamPlayer" = self.current_queue_item().player
        current_player.pause()

    def resume(self):
        """Resume the current song's player."""
        current_player: "StreamPlayer" = self.current_queue_item().player
        current_player.resume()

    def stop(self):
        """Stop the current song's player."""
        current_player: "StreamPlayer" = self.current_queue_item().player
        current_player.stop()
class _Records(object):
    """The records stored in the registry.

    This implements dict-like access to records, where as the Registry
    object implements dict-like read-only access to values.

    Internally a record name maps to two parallel BTrees: ``_fields``
    (the persistent field, or a string pointer naming another record)
    and ``_values`` (the stored value).  Record objects handed out by
    ``__getitem__`` are re-assembled on the fly from those two trees.
    """

    __parent__ = None

    # Similar to zope.schema._field._isdotted, but allows up to one '/'
    _validkey = re.compile(
        r"([a-zA-Z][a-zA-Z0-9_-]*)"
        r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
        r"([/][a-zA-Z][a-zA-Z0-9_-]*)?"
        r"([.][a-zA-Z][a-zA-Z0-9_-]*)*"
        # use the whole line
        r"$").match

    def __init__(self, parent):
        self.__parent__ = parent
        self._fields = OOBTree()  # name -> IPersistentField or str pointer
        self._values = OOBTree()  # name -> stored value

    def __setitem__(self, name, record):
        """Store *record* under *name* and emit RecordAddedEvent.

        Raises InvalidRegistryKey for a malformed name and ValueError
        when *record* does not provide IRecord.
        """
        if not self._validkey(name):
            # BUG FIX: report the offending key; the original passed the
            # record object, which made the error message useless.
            raise InvalidRegistryKey(name)
        if not IRecord.providedBy(record):
            raise ValueError("Value must be a record")

        self._setField(name, record.field)
        self._values[name] = record.value

        # Bind the record into the registry tree before notifying.
        record.__name__ = name
        record.__parent__ = self.__parent__

        notify(RecordAddedEvent(record))

    def __delitem__(self, name):
        record = self[name]
        # unbind the record so that it won't attempt to look up values from
        # the registry anymore
        record.__parent__ = None
        del self._fields[name]
        del self._values[name]
        notify(RecordRemovedEvent(record))

    def __getitem__(self, name):
        """Rebuild and return a bound Record for *name* (KeyError if absent)."""
        field = self._getField(name)
        value = self._values[name]
        record = Record(field, value, _validate=False)
        record.__name__ = name
        record.__parent__ = self.__parent__
        return record

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def __nonzero__(self):
        return self._values.__nonzero__()

    def __len__(self):
        return self._values.__len__()

    def __iter__(self):
        return self._values.__iter__()

    def has_key(self, name):
        return self._values.__contains__(name)

    def __contains__(self, name):
        return self._values.__contains__(name)

    def keys(self, min=None, max=None):
        return self._values.keys(min, max)

    def maxKey(self, key=None):
        return self._values.maxKey(key)

    def minKey(self, key=None):
        return self._values.minKey(key)

    def values(self, min=None, max=None):
        return [self[name] for name in self.keys(min, max)]

    def items(self, min=None, max=None):
        return [(name, self[name],) for name in self.keys(min, max)]

    def setdefault(self, key, value):
        if key not in self:
            self[key] = value
        return self[key]

    def clear(self):
        self._fields.clear()
        self._values.clear()

    # Helper methods

    def _getField(self, name):
        """Resolve the field for *name*, following string pointers.

        A chain of pointers is walked to the concrete field, which is then
        wrapped in a FieldRef naming the final target record.
        """
        field = self._fields[name]

        # Handle field reference pointers
        if isinstance(field, basestring):
            recordName = field
            while isinstance(field, basestring):
                recordName = field
                field = self._fields[recordName]
            field = FieldRef(recordName, field)

        return field

    def _setField(self, name, field):
        """Store *field* for *name*; field refs are stored as pointers."""
        if not IPersistentField.providedBy(field):
            raise ValueError("The record's field must be an IPersistentField.")
        if IFieldRef.providedBy(field):
            if field.recordName not in self._fields:
                raise ValueError(
                    "Field reference points to non-existent record"
                )
            self._fields[name] = field.recordName  # a pointer, of sorts
        else:
            field.__name__ = 'value'
            self._fields[name] = field
class PendingList(object):
    """ Implementation of IPendingList

      Set up the pending list
        >>> from Products.listen.content import PendingList
        >>> plist = PendingList()

      Add a few pending members
        >>> plist.add('tom')
        >>> plist.add('*****@*****.**')
        >>> plist.add('mikey', time='2006-05-09', pin='4532123')
        >>> sorted(plist.get_user_emails())
        ['*****@*****.**', 'mikey', 'tom']

      The time that we set on mikey should be used instead of the default time
        >>> plist.get_pending_time('mikey')
        '2006-05-09'
        >>> plist.get_user_pin('mikey')
        '4532123'

      Try and add mikey a second time and make sure data is not lost but
      time is updated
        >>> plist.add('mikey')
        >>> plist.get_user_pin('mikey')
        '4532123'
        >>> plist.get_pending_time('mikey') != '2006-05-09'
        True

      Now let's remove them
        >>> plist.remove('tom')
        >>> plist.remove('*****@*****.**')
        >>> plist.remove('mikey')
        >>> plist.get_user_emails()
        []

      Let's create an item with a post
        >>> plist.add('timmy', post='a new post')
        >>> post = plist.get_posts('timmy')[0]
        >>> post['header']
        {}
        >>> post['body']
        'a new post'

      Verify the id of the post
        >>> post['postid']
        0

      Let's add a new post, and verify its id too
        >>> plist.add('timmy', post='hi there')
        >>> newpost = plist.get_posts('timmy')[1]
        >>> newpost['postid']
        1

      Remove the first one
        >>> plist.pop_post('timmy', 0) is not None
        True
        >>> p = plist.get_posts('timmy')[0]
        >>> p['body']
        'hi there'
        >>> p['postid']
        1

      Trying to pop a fake post returns None
        >>> plist.pop_post('timmy', 0) is None
        True
        >>> plist.pop_post('timmy', 17) is None
        True
    """

    implements(IPendingList)

    def __init__(self):
        # item (e.g. email) -> OOBTree of per-item data ('time', 'pin',
        # 'post' -> IOBTree of postid -> post dict, ...)
        self.pend = OOBTree()
        # When True, callers are required to supply 'time' and 'postid'
        # themselves (assertions below enforce this).
        self.trust_caller = False

    def add(self, item, **values):
        # Record (or update) a pending entry.  Any keyword values are merged
        # into the item's data; 'time' defaults to now, and 'post' is
        # normalized into an id-keyed tree of post dicts.
        self.pend.setdefault(item, OOBTree())
        if 'time' not in values:
            if self.trust_caller:
                # Trusted callers must pass their own timestamp.
                raise AssertionError("No time passed in: %s" % values)
            values['time'] = DateTime().ISO()
        if 'post' in values:
            post_list = self.pend[item].setdefault('post', IOBTree())
            new_post = values['post']
            if isinstance(new_post, basestring):
                # Bare string posts become a minimal post dict.
                new_post = dict(header={}, body=new_post)
            try:
                nextid = post_list.maxKey() + 1
            except ValueError:
                # Empty tree: first post gets id 0.
                nextid = 0
            if self.trust_caller:
                assert 'postid' in new_post, new_post
            else:
                new_post['postid'] = nextid
            post_list[new_post['postid']] = new_post
            # 'post' is stored in the IOBTree, not as a plain value.
            values.pop('post')
        self.pend[item].update(values)

    def remove(self, item):
        # Silently ignores unknown items.
        if item in self.pend:
            self.pend.pop(item)

    def pop_post(self, item, postid):
        # Remove and return the post with `postid`, or None when absent.
        # NOTE: raises KeyError if `item` itself is unknown or has no posts.
        posts = self.pend[item]['post']
        try:
            return posts.pop(postid)
        except KeyError:
            return None

    def get_posts(self, user_email):
        return list(self.pend.get(user_email, {}).get('post', {}).values())

    def is_pending(self, item):
        return item in self.pend

    def get_user_pin(self, user_email):
        return self.pend.get(user_email, {}).get('pin')

    def get_pending_time(self, user_email):
        return self.pend.get(user_email, {}).get('time')

    def get_user_emails(self):
        return list(self.pend.keys())

    def get_user_name(self, user_email):
        return self.pend.get(user_email, {}).get('user_name')

    def clear(self):
        # Generator: yields every pending entry (each post flattened with its
        # item-level data plus an 'email' key), then wipes the list.  Note the
        # storage is only cleared after the caller exhausts the generator.
        for email, item in self.pend.items():
            if 'post' in item:
                for post in item['post'].values():
                    # Fold the item-level values (time, pin, ...) into each
                    # post dict, except the post tree itself.
                    for k, v in item.items():
                        if k == 'post':
                            continue
                        post[k] = v
                    post['email'] = email
                    yield post
            else:
                item['email'] = email
                yield item
        self.pend.clear()
class NoDuplicateLogin(BasePlugin, Cacheable):
    """PAS plugin that rejects multiple logins with the same user at
    the same time, by forcing a logout of all but one user.

    If a user has max_seats > 1, then it will reject users after maximum
    seats are filled.
    """

    meta_type = 'No Duplicate Login Plugin'
    cookie_name = '__noduplicate'
    DEBUG = False

    security = ClassSecurityInfo()

    # userid -> {maxSeats, seatTimeoutInMinutes, expireTime}; created lazily.
    login_member_data_mapping = None

    _properties = (
        {
            'id': 'title',
            'label': 'Title',
            'type': 'string',
            'mode': 'w'
        },
        {
            'id': 'cookie_name',
            'label': 'Cookie Name',
            'type': 'string',
            'mode': 'w'
        },
    )

    # UIDs older than 30 minutes are deleted from our storage; this can also be set per member data property (which default to 5 minutes)...
    if DEBUG:
        default_minutes_to_persist = 5
    else:
        default_minutes_to_persist = 30
    time_to_persist_cookies = datetime.timedelta(
        minutes=default_minutes_to_persist)

    # XXX I wish I had a better explanation for this, but disabling this makes
    # both the ZMI (basic auth) work and the NoDuplicateLogin work.
    # Otherwise, we get a traceback on basic auth.  I suspect that means this
    # plugin needs to handle basic auth better but I'm not sure how or why.
    # Normally, we would prefer to see our exceptions.
    _dont_swallow_my_exceptions = False

    def __init__(self, id, title=None, cookie_name=''):
        self._id = self.id = id
        self.title = title
        if cookie_name:
            self.cookie_name = cookie_name
        self.mapping1 = OOBTree()  # userid : { tokens:[ UID, UID, UID] }
        self.mapping2 = OOBTree(
        )  # UID : { userid: string, ip: string, startTime: DateTime, expireTime: DateTime }
        self.login_member_data_mapping = OOBTree(
        )  # userid : { maxSeats: integer, seatTimeoutInMinutes: float, expireTime: DateTime }
        self.plone_session = None  # for plone.session

    security.declarePrivate('authenticateCredentials')

    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
                credentials.get('source') != 'plone.session'):
            # Not a login/password pair and not a plone.session ticket:
            # nothing for this plugin to track.
            return None
        else:
            # plone.session path: validate the __ac ticket ourselves to
            # recover the login name.
            session_source = self.session
            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret,
                    ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                # No shared secret configured: try every system secret from
                # the key manager until one validates the ticket.
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue
                    ticket_data = tktauth.validateTicket(
                        secret,
                        ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)
                    if ticket_data is not None:
                        break
            if ticket_data is None:
                return None
            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)
            if info is None:
                return None
            login = info['login']

        cookie_val = self.getCookie()

        # get max seats from member data property or cache and default to 1 if not set
        # NOTE(review): if getMaxSeatsForLogin raises, max_seats stays
        # unbound and the `if max_seats == 1` below raises NameError.
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str(max_seats)

        if max_seats == 1:
            # Single-seat mode: at most one active token per login.
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)

                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join(
                            existing['tokens'])

                if existing and cookie_val not in existing['tokens']:
                    # The cookies values differ, we want to logout the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will cleanup our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(
                            _(u"Someone else logged in under your name. You have been \
logged out"), "error")
                    except:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')
            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)
                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass
                try:
                    from_ip = self.get_ip(request)
                except:
                    traceback.print_exc()
                now = DateTime()
                # Replace any previous token list: single-seat mode keeps
                # exactly one active token per login.
                self.mapping1[login] = {'tokens': []}
                self.mapping1[login]['tokens'].append(cookie_val)
                self.mapping2[cookie_val] = {
                    'userid': login,
                    'ip': from_ip,
                    'startTime': now,
                    'expireTime': DateTime(now.asdatetime() +
                                           self.time_to_persist_cookies)
                }
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it or activate it if is it not added yet
                self.verifyToken(cookie_val, login, max_seats, request,
                                 response)
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."
                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if max_seats are filled, then force logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)

        return None  # Note that we never return anything useful

    security.declarePrivate('getSeatsPropertiesForLogin')

    def getSeatsPropertiesForLogin(self, login):
        # Returns {'maxSeats': int, 'seatTimeoutInMinutes': float} for the
        # login, consulting a time-limited cache before the member data tool.
        # initialize max_seats at 1
        max_seats = 1
        seat_timeout = 5  # initialize to 5 minutes

        if self.login_member_data_mapping is None:
            self.login_member_data_mapping = OOBTree(
            )  # if this has not been initialized then do it now
            if self.DEBUG:
                print "Initialized the Login Member Data Mapping"

        # if the max_seats has a valid cached value, then use it
        cached_member_data = self.login_member_data_mapping.get(login, None)
        now = DateTime()
        if cached_member_data and 'expireTime' in cached_member_data and 'maxSeats' in cached_member_data and 'seatTimeoutInMinutes' in cached_member_data and now < cached_member_data[
                'expireTime']:
            max_seats = cached_member_data['maxSeats']
            seat_timeout = cached_member_data['seatTimeoutInMinutes']
        else:
            member = self.getMember(login)
            # get the max_seats property from the member data tool
            if member is not None:
                max_seats = member.getProperty("max_seats")
                seat_timeout = member.getProperty("seat_timeout_in_minutes")
                # cache the max_seats for login
                td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
                self.login_member_data_mapping[login] = {
                    'maxSeats': int(max_seats),
                    'seatTimeoutInMinutes': float(seat_timeout),
                    'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
                }

        return {
            'maxSeats': int(max_seats),
            'seatTimeoutInMinutes': float(seat_timeout)
        }

    def getMember(self, email):
        """ Returns a member object for the given username """
        member = None
        try:
            member = api.user.get(username=email)
        except:
            if self.DEBUG:
                traceback.print_exc()
        return member

    security.declarePrivate('getMaxSeatsForLogin')

    def getMaxSeatsForLogin(self, login):
        """Returns the max_seats property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        max_seats = 1  # default to 1 seat
        if seats_properties and 'maxSeats' in seats_properties:
            max_seats = seats_properties['maxSeats']
        return max_seats

    security.declarePrivate('getSeatTimeoutInMinutesForLogin')

    def getSeatTimeoutInMinutesForLogin(self, login):
        """Returns the seat_timeout_in_minutes property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        seat_timeout_in_minutes = 5  # default to 5 minutes
        if seats_properties and 'seatTimeoutInMinutes' in seats_properties:
            seat_timeout_in_minutes = seats_properties['seatTimeoutInMinutes']
        return seat_timeout_in_minutes

    security.declarePrivate('resetCredentials')

    def resetCredentials(self, request, response):
        """See ICredentialsResetPlugin.
        """
        # Invalidate this browser's token server-side, then expire the cookie.
        alsoProvides(request, IDisableCSRFProtection)
        if self.DEBUG:
            print "resetCredentials()::"

        try:
            cookie_val = self.getCookie()
            if cookie_val:
                loginandinfo = self.mapping2.get(cookie_val, None)
                if loginandinfo:
                    login = loginandinfo['userid']
                    del self.mapping2[cookie_val]
                    existing = self.mapping1.get(login, None)
                    if existing and 'tokens' in existing and cookie_val in existing[
                            'tokens']:
                        existing['tokens'].remove(cookie_val)
                        assert cookie_val not in existing['tokens']
            self.setCookie('')
        except:
            if self.DEBUG:
                traceback.print_exc()

    security.declarePrivate('resetAllCredentials')

    def resetAllCredentials(self, request, response):
        """Call resetCredentials of all plugins.

        o This is not part of any contract.
        """
        # This is arguably a bit hacky, but calling
        # pas_instance.resetCredentials() will not do anything because
        # the user is still anonymous.  (I think it should do
        # something nevertheless.)
        alsoProvides(request, IDisableCSRFProtection)
        pas_instance = self._getPAS()
        plugins = pas_instance._getOb('plugins')
        cred_resetters = plugins.listPlugins(ICredentialsResetPlugin)
        for resetter_id, resetter in cred_resetters:
            resetter.resetCredentials(request, response)

    security.declarePrivate('getCookie')

    def getCookie(self):
        """Helper to retrieve the cookie value from either cookie or
        session, depending on policy.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        cookie = request.get(self.cookie_name, '')
        if self.DEBUG:
            print "getCookie():: " + str(unquote(cookie))
        return unquote(cookie)

    security.declarePrivate('setCookie')

    def setCookie(self, value):
        """Helper to set the cookie value to either cookie or
        session, depending on policy.

        o Setting to '' means delete.
        """
        value = quote(value)
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        response = request['RESPONSE']
        if value:
            response.setCookie(self.cookie_name, value, path='/')
        else:
            response.expireCookie(self.cookie_name, path='/')
        if self.DEBUG:
            print "setCookie():: " + str(value)

    security.declarePrivate('clearSeatsPropertiesForLogin')

    def clearSeatsPropertiesForLogin(self, login):
        """ Clears the cached seats properties for the given user. """
        isCached = self.login_member_data_mapping and self.login_member_data_mapping.get(
            login, None) is not None
        if isCached:
            del self.login_member_data_mapping[login]

    security.declarePrivate('clearStaleTokens')

    def clearStaleTokens(self, login):
        """Clear tokens that should be expired or that have no corresponding
        mapping and thus have been orphaned."""
        if self.DEBUG:
            print "clearStaleTokens:: " + login
        existing = self.mapping1.get(login, None)
        if existing and 'tokens' in existing:
            # for each token, remove if stale
            # NOTE(review): this mutates existing['tokens'] while iterating
            # over it, which can skip entries — confirm whether intentional.
            for token in existing['tokens']:
                tokenInfo = self.mapping2.get(token, None)
                now = DateTime()
                # if the token info does not exist, then remove it from the active tokens
                if tokenInfo is None:
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because it was orphaned." % (
                            token)
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)
                # if the expireTime for the token has passed, then expire the token
                if tokenInfo and 'expireTime' in tokenInfo and tokenInfo[
                        'expireTime'] < now:
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because expireTime(%s). startTime(%s)" % (
                            token, tokenInfo['expireTime'],
                            tokenInfo['startTime'])
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)
                    del self.mapping2[token]

    security.declarePrivate('clearAllTokensForUser')

    def clearAllTokensForUser(self, login):
        """Clear all tokens for a specific user."""
        if self.DEBUG:
            print "clearAllTokensForUser:: " + login
        existing = self.mapping1.get(login, None)
        if existing and 'tokens' in existing:
            # for each token, remove if stale
            # NOTE(review): same mutate-while-iterating caveat as
            # clearStaleTokens above.
            for token in existing['tokens']:
                tokenInfo = self.mapping2.get(token, None)
                now = DateTime()
                # remove it from the active tokens
                if self.DEBUG:
                    print "clearAllTokensForUser:: Remove token (%s) because it was orphaned." % (
                        token)
                # remove from the active tokens for the given login
                self.mapping1[login]['tokens'].remove(token)
                # if there is also a corresponding mapping for tokenInfo, then delete the mapping
                if tokenInfo:
                    del self.mapping2[token]

    security.declarePrivate('issueToken')

    def issueToken(self, login, max_seats, request, response):
        """ Creates a uid and stores in a cookie browser-side """
        # When no cookie is present, we generate one, store it and
        # set it in the response:
        alsoProvides(request, IDisableCSRFProtection)
        cookie_val = uuid()
        if self.DEBUG:
            print "issueToken::" + cookie_val
        self.setCookie(cookie_val)

    security.declarePrivate('forceLogoutForUser')

    def forceLogoutForUser(self, login, request, response):
        """ Forces logout. """
        # Logout the
        # user by calling resetCredentials.  Note that this
        # will eventually call our own resetCredentials which
        # will cleanup our own cookie.
        alsoProvides(request, IDisableCSRFProtection)
        try:
            self.resetAllCredentials(request, response)
            self._getPAS().plone_utils.addPortalMessage(
                _(u"The maximum number of simultaneous logins for this user has been exceeded. You have been \
logged out."), "error")
        except:
            traceback.print_exc()

    security.declarePrivate('isLoginAtCapacity')

    def isLoginAtCapacity(self, login, max_seats):
        """ Returns whether or not the login has filled all available seats. """
        # clear stale tokens to make sure we use the correct token count
        self.clearStaleTokens(login)

        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len(existing['tokens'])

        # return whether max_seats have been filled
        return iTokens >= max_seats

    security.declarePrivate('verifyToken')

    def verifyToken(self, token, login, max_seats, request, response):
        """ Activates a token by putting it in the tokens[] array of
        mapping1[login] if it is not already present. """
        alsoProvides(request, IDisableCSRFProtection)
        isVerified = False  # it is verified if it is already in the active tokens list server-side
        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len(existing['tokens'])
            isVerified = token in existing['tokens']

            if self.DEBUG:
                print "authenticateCredentials():: cookie_val is " + token + ", and active tokens are: " + ', '.join(
                    existing['tokens'])
        else:
            self.mapping1[login] = {
                'tokens': []
            }  # initialize tokens array for this login

        if self.DEBUG:
            print "verifyToken:: login = %s, active = %i, max = %i" % (
                login, iTokens, max_seats)

        try:
            # for seats > 1, use member property for cookie timeout value
            seat_timeout = self.getSeatTimeoutInMinutesForLogin(login)
            td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        except:
            # NOTE(review): on failure td_seat_timeout stays unbound and the
            # uses below would raise NameError — confirm this cannot happen.
            pass

        # if this is the last token to issue,
        # then go ahead and clear stale tokens for this login
        if not isVerified and iTokens >= max_seats - 1:
            self.clearStaleTokens(login)

        try:
            from_ip = self.get_ip(request)
        except:
            traceback.print_exc()

        if isVerified:
            # just extend it
            now = DateTime()
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }
            if self.DEBUG:
                print "verifyToken:: logon= %s, IP= %s, startTime= %s, expireTime= %s" % (
                    self.mapping2.get(token)['userid'], from_ip,
                    self.mapping2.get(token)['startTime'],
                    self.mapping2.get(token)['expireTime'])
        elif iTokens < max_seats:
            now = DateTime()
            # if it already exists, add it
            self.mapping1[login]['tokens'].append(token)
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }
            if self.DEBUG:
                print "verifyToken:: after activate token, active tokens = " + ', '.join(
                    self.mapping1[login]['tokens'])
            # since this was activated, just ensure that the cookie in the browser reflects what is server side
            self.setCookie(token)
        else:
            # cannot issue cookie, so clear in browser-side
            #self.setCookie('')
            # if the token is not able to be issued because of max_seats filled,
            # then force logout, and show the message
            # Logout the
            # user by calling resetCredentials.  Note that this
            # will eventually call our own resetCredentials which
            # will cleanup our own cookie.
            try:
                self.resetAllCredentials(request, response)
                self._getPAS().plone_utils.addPortalMessage(
                    _(u"The maximum number of simultaneous logins for this user has been exceeded. You have been \
logged out."), "error")
            except:
                traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'clearAllTokens')

    def clearAllTokens(self):
        """Clear all server side tokens.  Use only in testing."""
        if self.DEBUG:
            print "clearAllTokens():: called"
        try:
            self.mapping1.clear()
            self.mapping2.clear()
            self.setCookie('')
        except:
            traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'cleanUp')

    def cleanUp(self):
        """Clean up storage.

        Call this periodically through the web to clean up old entries in
        the storage."""
        now = DateTime()

        def cleanStorage(mapping):
            # Delete malformed (non-dict) and expired entries; returns the
            # number of deletions for this mapping.
            count = 0
            for key, obj in mapping.items():
                # if this is not a dictionary, then it is a stale entry (could be tuple from old scheme)
                if not isinstance(obj, dict):
                    del mapping[key]
                    count += 1
                elif 'expireTime' in obj and obj['expireTime'] < now:
                    del mapping[key]
                    # if the mapping2 deletes its token by UID, make sure that the mapping1 removes that token as well
                    for userid, info in self.mapping1.items():
                        try:
                            info['tokens'].remove(
                                key
                            )  # remove the UID from the tokens for that login
                        except:
                            pass
                    count += 1
            return count

        # NOTE(review): `count` is overwritten each pass, so the message
        # reports only the last mapping's deletions — confirm if intended.
        for mapping in self.mapping2, self.login_member_data_mapping:
            count = cleanStorage(mapping)

        return "%s entries deleted." % count

    security.declarePrivate(Permissions.manage_users, 'get_ip')

    def get_ip(self, request):
        """ Extract the client IP address from the HTTP request in a proxy-compatible way.

        @return: IP address as a string or None if not available"""
        # NOTE(review): trusting X-Forwarded-For verbatim allows spoofing
        # unless a fronting proxy always sets it; also the HTTP_HOST branch
        # actually reads REMOTE_ADDR — confirm both are intended.
        if "HTTP_X_FORWARDED_FOR" in request.environ:
            # Virtual host
            ip = request.environ["HTTP_X_FORWARDED_FOR"]
        elif "HTTP_HOST" in request.environ:
            # Non-virtualhost
            ip = request.environ["REMOTE_ADDR"]
        else:
            # Should not reach here
            ip = '0.0.0.0'
        if self.DEBUG:
            print "get_ip:: " + ip
        return ip
class NoDuplicateLogin(BasePlugin, Cacheable):
    """PAS plugin that rejects multiple logins with the same user at
    the same time, by forcing a logout of all but one user.

    If a user has max_seats > 1, then it will reject users after maximum
    seats are filled.
    """

    meta_type = 'No Duplicate Login Plugin'
    # Browser-side cookie that carries the server-issued session token (UID).
    cookie_name = '__noduplicate'
    DEBUG = False

    security = ClassSecurityInfo()

    # Class-level default; replaced with an OOBTree per instance (see __init__
    # and getSeatsPropertiesForLogin, which lazily re-initializes it).
    login_member_data_mapping = None

    _properties = (
        {'id': 'title', 'label': 'Title', 'type': 'string', 'mode': 'w'},
        {'id': 'cookie_name', 'label': 'Cookie Name', 'type': 'string', 'mode': 'w'},
    )

    # UIDs older than 30 minutes are deleted from our storage; this can also be set per member data property (which default to 5 minutes)...
    if DEBUG:
        default_minutes_to_persist = 5
    else:
        default_minutes_to_persist = 30
    time_to_persist_cookies = datetime.timedelta(minutes=default_minutes_to_persist)

    # XXX I wish I had a better explanation for this, but disabling this makes
    # both the ZMI (basic auth) work and the NoDuplicateLogin work.
    # Otherwise, we get a traceback on basic auth. I suspect that means this
    # plugin needs to handle basic auth better but I'm not sure how or why.
    # Normally, we would prefer to see our exceptions.
    _dont_swallow_my_exceptions = False

    def __init__(self, id, title=None, cookie_name=''):
        # Zope plugin id/title bookkeeping; cookie_name override is optional.
        self._id = self.id = id
        self.title = title
        if cookie_name:
            self.cookie_name = cookie_name
        self.mapping1 = OOBTree()  # userid : { tokens:[ UID, UID, UID] }
        self.mapping2 = OOBTree()  # UID : { userid: string, ip: string, startTime: DateTime, expireTime: DateTime }
        self.login_member_data_mapping = OOBTree()  # userid : { maxSeats: integer, seatTimeoutInMinutes: float, expireTime: DateTime }
        self.plone_session = None  # for plone.session

    security.declarePrivate('authenticateCredentials')

    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.

        Side effects: enforces the single-seat / multi-seat policy by
        issuing, verifying, or revoking session tokens and possibly
        resetting all credentials (forced logout).  Always returns None.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        response = request['RESPONSE']
        pas_instance = self._getPAS()
        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
                credentials.get('source') != 'plone.session'):
            return None
        else:
            # plone.session path: validate the tktauth ticket to recover the
            # real login before applying seat policy.
            session_source = self.session
            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret,
                    ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                # Try every rotating system secret until one validates.
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue
                    ticket_data = tktauth.validateTicket(
                        secret,
                        ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)
                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)
            if info is None:
                return None
            login = info['login']

        cookie_val = self.getCookie()

        # get max seats from member data property or cache and default to 1 if not set
        # NOTE(review): if getMaxSeatsForLogin raises, max_seats is left
        # unbound and the reference below raises NameError — confirm intended.
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str( max_seats )

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there. If it's the same as the value
                # in our mapping, it's fine. Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)

                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join( existing['tokens'] )

                if existing and cookie_val not in existing['tokens']:
                    # The cookies values differ, we want to logout the
                    # user by calling resetCredentials. Note that this
                    # will eventually call our own resetCredentials which
                    # will cleanup our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(_(
                            u"Someone else logged in under your name. You have been \
logged out"), "error")
                    except:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it. Let's reset our own cookie:
                    self.setCookie('')
            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()

                # do some cleanup in our mappings
                existing = self.mapping1.get(login)
                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass

                # NOTE(review): if get_ip raises, from_ip is unbound below —
                # the bare except only prints the traceback; confirm intended.
                try:
                    from_ip = self.get_ip( request )
                except:
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = { 'tokens':[] }
                self.mapping1[login]['tokens'].append( cookie_val )
                self.mapping2[cookie_val] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + self.time_to_persist_cookies )}
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it or activate it if is it not added yet
                self.verifyToken( cookie_val, login, max_seats, request, response )
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."

                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if max_seats are filled, then force logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)

        return None  # Note that we never return anything useful

    security.declarePrivate('getSeatsPropertiesForLogin')

    def getSeatsPropertiesForLogin(self, login):
        # Returns { 'maxSeats': int, 'seatTimeoutInMinutes': float } for the
        # login, served from the cache when fresh, else re-read from the
        # member data tool and cached until its own expireTime.
        # initialize max_seats at 1
        max_seats = 1
        seat_timeout = 5  # initialize to 5 minutes

        if self.login_member_data_mapping is None:
            self.login_member_data_mapping = OOBTree()  # if this has not been initialized then do it now
            if self.DEBUG:
                print "Initialized the Login Member Data Mapping"

        # if the max_seats has a valid cached value, then use it
        cached_member_data = self.login_member_data_mapping.get(login, None)
        now = DateTime()
        if cached_member_data and 'expireTime' in cached_member_data and 'maxSeats' in cached_member_data and 'seatTimeoutInMinutes' in cached_member_data and now < cached_member_data['expireTime']:
            max_seats = cached_member_data['maxSeats']
            seat_timeout = cached_member_data['seatTimeoutInMinutes']
        else:
            member = self.getMember(login)
            # get the max_seats property from the member data tool
            if member is not None:
                max_seats = member.getProperty("max_seats")
                seat_timeout = member.getProperty("seat_timeout_in_minutes")
                # cache the max_seats for login
                td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
                self.login_member_data_mapping[login] = { 'maxSeats': int( max_seats ), 'seatTimeoutInMinutes': float( seat_timeout ), 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}

        return { 'maxSeats': int( max_seats ), 'seatTimeoutInMinutes': float( seat_timeout ) }

    def getMember(self, email):
        """ Returns a member object for the given username """
        member = None

        # try to get the member if it exists
        mtool = getToolByName(self, 'portal_membership')
        memberInfo = mtool.searchMembers('email', email)  # try by email first

        try:
            memberId = memberInfo[0]['username']
            member = mtool.getMemberById( memberId )
        except:
            member = None
            pass

        if member == None:
            memberInfo = mtool.searchMembers('username', email)  # try by username if not found by email (should be same but sometimes are not)

            try:
                memberId = memberInfo[0]['username']
                member = mtool.getMemberById( memberId )
            except:
                member = None
                pass

        # If the member was not found by email or username, then check the actual login_name stored in acl_users
        # Try to find this user via the login name.
        self.portal = getSite()
        acl = self.portal.acl_users
        if member == None:
            userids = [user.get('userid') for user in acl.searchUsers(login=email, exact_match=False) if user.get('userid')]
            for userid in userids:
                memberRecord = mtool.getMemberById(userid)
                # must check that the login name is the same without regards to case since exact_match may return even partial matches
                if memberRecord is not None and email.lower() == memberRecord.getProperty('email').lower():
                    member = memberRecord
                    break

        return member

    security.declarePrivate('getMaxSeatsForLogin')

    def getMaxSeatsForLogin(self, login):
        """Returns the max_seats property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        max_seats = 1  # default to 1 seat
        if seats_properties and 'maxSeats' in seats_properties:
            max_seats = seats_properties['maxSeats']
        return max_seats

    security.declarePrivate('getSeatTimeoutInMinutesForLogin')

    def getSeatTimeoutInMinutesForLogin(self, login):
        """Returns the seat_timeout_in_minutes property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        seat_timeout_in_minutes = 5  # default to 5 minutes
        if seats_properties and 'seatTimeoutInMinutes' in seats_properties:
            seat_timeout_in_minutes = seats_properties['seatTimeoutInMinutes']
        return seat_timeout_in_minutes

    security.declarePrivate('resetCredentials')

    def resetCredentials(self, request, response):
        """See ICredentialsResetPlugin.

        Removes the browser's token from both server-side mappings and
        expires the cookie.  All failures are swallowed (best effort).
        """
        alsoProvides(request, IDisableCSRFProtection)
        if self.DEBUG:
            print "resetCredentials()::"
        try:
            cookie_val = self.getCookie()
            if cookie_val:
                loginandinfo = self.mapping2.get(cookie_val, None)
                if loginandinfo:
                    login = loginandinfo['userid']
                    del self.mapping2[cookie_val]
                    existing = self.mapping1.get(login, None)
                    if existing and 'tokens' in existing and cookie_val in existing['tokens']:
                        existing['tokens'].remove(cookie_val)
                        assert cookie_val not in existing['tokens']
            self.setCookie('')
        except:
            if self.DEBUG:
                traceback.print_exc()

    security.declarePrivate('resetAllCredentials')

    def resetAllCredentials(self, request, response):
        """Call resetCredentials of all plugins.

        o This is not part of any contract.
        """
        # This is arguably a bit hacky, but calling
        # pas_instance.resetCredentials() will not do anything because
        # the user is still anonymous. (I think it should do
        # something nevertheless.)
        alsoProvides(request, IDisableCSRFProtection)
        pas_instance = self._getPAS()
        plugins = pas_instance._getOb('plugins')
        cred_resetters = plugins.listPlugins(ICredentialsResetPlugin)
        for resetter_id, resetter in cred_resetters:
            resetter.resetCredentials(request, response)

    security.declarePrivate('getCookie')

    def getCookie(self):
        """Helper to retrieve the cookie value from either cookie or
        session, depending on policy.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        cookie = request.get(self.cookie_name, '')
        if self.DEBUG:
            print "getCookie():: " + str(unquote(cookie))
        return unquote(cookie)

    security.declarePrivate('setCookie')

    def setCookie(self, value):
        """Helper to set the cookie value to either cookie or
        session, depending on policy.

        o Setting to '' means delete.
        """
        value = quote(value)
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)
        response = request['RESPONSE']
        if value:
            response.setCookie(self.cookie_name, value, path='/')
        else:
            response.expireCookie(self.cookie_name, path='/')
        if self.DEBUG:
            print "setCookie():: " + str(value)

    security.declarePrivate('clearSeatsPropertiesForLogin')

    def clearSeatsPropertiesForLogin(self, login):
        """ Clears the cached seats properties for the given user. """
        isCached = self.login_member_data_mapping and self.login_member_data_mapping.get(login, None) is not None
        if isCached:
            del self.login_member_data_mapping[login]

    security.declarePrivate('clearStaleTokens')

    def clearStaleTokens(self, login):
        """Clear tokens that should be expired or that have no
        corresponding mapping and thus have been orphaned."""
        if self.DEBUG:
            print "clearStaleTokens:: " + login
        existing = self.mapping1.get(login, None)
        if existing and 'tokens' in existing:
            # for each token, remove if stale
            # NOTE(review): removes items from existing['tokens'] while
            # iterating over the same list, which can skip entries — confirm
            # whether a stale token may survive one pass.
            for token in existing['tokens']:
                tokenInfo = self.mapping2.get( token, None )
                now = DateTime()
                # if the token info does not exist, then remove it from the active tokens
                if tokenInfo is None:
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because it was orphaned." % (token)
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)
                # if the expireTime for the token has passed, then expire the token
                if tokenInfo and 'expireTime' in tokenInfo and tokenInfo['expireTime'] < now:
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because expireTime(%s). startTime(%s)" % (token, tokenInfo['expireTime'], tokenInfo['startTime'] )
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)
                    del self.mapping2[token]

    security.declarePrivate('clearAllTokensForUser')

    def clearAllTokensForUser(self, login):
        """Clear all tokens for a specific user."""
        if self.DEBUG:
            print "clearAllTokensForUser:: " + login
        existing = self.mapping1.get(login, None)
        if existing and 'tokens' in existing:
            # for each token, remove if stale
            # NOTE(review): same mutate-while-iterating pattern as
            # clearStaleTokens — confirm all tokens are removed in one call.
            for token in existing['tokens']:
                tokenInfo = self.mapping2.get( token, None )
                now = DateTime()
                # remove it from the active tokens
                if self.DEBUG:
                    print "clearAllTokensForUser:: Remove token (%s) because it was orphaned." % (token)
                # remove from the active tokens for the given login
                self.mapping1[login]['tokens'].remove(token)
                # if there is also a corresponding mapping for tokenInfo, then delete the mapping
                if tokenInfo:
                    del self.mapping2[token]

    security.declarePrivate('issueToken')

    def issueToken(self, login, max_seats, request, response):
        """ Creates a uid and stores in a cookie browser-side """
        # When no cookie is present, we generate one, store it and
        # set it in the response:
        alsoProvides(request, IDisableCSRFProtection)
        cookie_val = uuid()
        if self.DEBUG:
            print "issueToken::" + cookie_val
        self.setCookie(cookie_val)

    security.declarePrivate('forceLogoutForUser')

    def forceLogoutForUser(self, login, request, response):
        """ Forces logout. """
        # Logout the
        # user by calling resetCredentials. Note that this
        # will eventually call our own resetCredentials which
        # will cleanup our own cookie.
        alsoProvides(request, IDisableCSRFProtection)
        try:
            self.resetAllCredentials(request, response)
            self._getPAS().plone_utils.addPortalMessage(_(
                u"The maximum number of simultaneous logins for this user has been exceeded. You have been \
logged out."), "error")
        except:
            traceback.print_exc()

    security.declarePrivate('isLoginAtCapacity')

    def isLoginAtCapacity(self, login, max_seats):
        """ Returns whether or not the login has filled all available seats. """
        # clear stale tokens to make sure we use the correct token count
        self.clearStaleTokens(login)

        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len( existing['tokens'] )
        # return whether max_seats have been filled
        return iTokens >= max_seats

    security.declarePrivate('verifyToken')

    def verifyToken(self, token, login, max_seats, request, response):
        """ Activates a token by putting it in the tokens[] array
        of mapping1[login] if it is not already present.

        Already-active tokens get their expireTime extended; a new token
        is activated only while seats remain, otherwise all credentials
        are reset (forced logout).
        """
        alsoProvides(request, IDisableCSRFProtection)
        isVerified = False  # it is verified if it is already in the active tokens list server-side
        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len( existing['tokens'] )
            isVerified = token in existing['tokens']
            if self.DEBUG:
                print "authenticateCredentials():: cookie_val is " + token + ", and active tokens are: " + ', '.join( existing['tokens'] )
        else:
            self.mapping1[login] = { 'tokens':[] }  # initialize tokens array for this login

        if self.DEBUG:
            print "verifyToken:: login = %s, active = %i, max = %i" % (login, iTokens, max_seats)

        try:
            # for seats > 1, use member property for cookie timeout value
            seat_timeout = self.getSeatTimeoutInMinutesForLogin(login)
            td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        except:
            # NOTE(review): on failure td_seat_timeout stays unbound and the
            # expireTime computations below would raise — confirm intended.
            pass

        # if this is the last token to issue,
        # then go ahead and clear stale tokens for this login
        if not isVerified and iTokens >= max_seats - 1:
            self.clearStaleTokens(login)

        try:
            from_ip = self.get_ip(request)
        except:
            traceback.print_exc()

        if isVerified:
            # just extend it
            now = DateTime()
            self.mapping2[token] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}
            if self.DEBUG:
                print "verifyToken:: logon= %s, IP= %s, startTime= %s, expireTime= %s" % ( self.mapping2.get(token)['userid'], from_ip, self.mapping2.get(token)['startTime'], self.mapping2.get(token)['expireTime'] )
        elif iTokens < max_seats:
            now = DateTime()
            # if it already exists, add it
            self.mapping1[login]['tokens'].append( token )
            self.mapping2[token] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}
            if self.DEBUG:
                print "verifyToken:: after activate token, active tokens = " + ', '.join(self.mapping1[login]['tokens'])
            # since this was activated, just ensure that the cookie in the browser reflects what is server side
            self.setCookie( token )
        else:
            # cannot issue cookie, so clear in browser-side
            #self.setCookie('')

            # if the token is not able to be issued because of max_seats filled,
            # then force logout, and show the message

            # Logout the
            # user by calling resetCredentials. Note that this
            # will eventually call our own resetCredentials which
            # will cleanup our own cookie.
            try:
                self.resetAllCredentials(request, response)
                self._getPAS().plone_utils.addPortalMessage(_(
                    u"The maximum number of simultaneous logins for this user has been exceeded. You have been \
logged out."), "error")
            except:
                traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'clearAllTokens')

    def clearAllTokens(self):
        """Clear all server side tokens. Use only in testing."""
        if self.DEBUG:
            print "clearAllTokens():: called"
        try:
            self.mapping1.clear()
            self.mapping2.clear()
            self.setCookie('')
        except:
            traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'cleanUp')

    def cleanUp(self):
        """Clean up storage. Call this periodically through the web
        to clean up old entries in the storage."""
        now = DateTime()

        def cleanStorage(mapping):
            # Delete malformed (non-dict) and expired entries; returns the
            # number of deletions.
            count = 0
            for key, obj in mapping.items():
                # if this is not a dictionary, then it is a stale entry (could be tuple from old scheme)
                if not isinstance( obj, dict ):
                    del mapping[key]
                    count += 1
                elif 'expireTime' in obj and obj['expireTime'] < now:
                    del mapping[key]
                    # if the mapping2 deletes its token by UID, make sure that the mapping1 removes that token as well
                    for userid, info in self.mapping1.items():
                        try:
                            info['tokens'].remove(key)  # remove the UID from the tokens for that login
                        except:
                            pass
                    count += 1
            return count

        # NOTE(review): count is overwritten each pass, so the message only
        # reports deletions from the last mapping — confirm intended.
        for mapping in self.mapping2, self.login_member_data_mapping:
            count = cleanStorage(mapping)

        return "%s entries deleted." % count

    # NOTE(review): declarePrivate normally takes only method names; the
    # Permissions.manage_users argument here looks like it was meant for
    # declareProtected — confirm.
    security.declarePrivate(Permissions.manage_users, 'get_ip')

    def get_ip(self, request):
        """ Extract the client IP address from the HTTP request in a proxy-compatible way.

        @return: IP address as a string or None if not available"""
        if "HTTP_X_FORWARDED_FOR" in request.environ:
            # Virtual host
            # NOTE(review): X-Forwarded-For may carry a comma-separated chain;
            # this returns the raw header value — confirm proxies send one IP.
            ip = request.environ["HTTP_X_FORWARDED_FOR"]
        elif "HTTP_HOST" in request.environ:
            # Non-virtualhost
            ip = request.environ["REMOTE_ADDR"]
        else:
            # Should not reach here
            ip = '0.0.0.0'
        if self.DEBUG:
            print "get_ip:: " + ip
        return ip
class UdbBtreeMultivaluedIndex(UdbIndex):
    """BTree-backed index that maps every key to a set of record uids.

    Keys are kept in ascending order inside an OOBTree, which is what
    enables the ranged, prefix, and multivalued lookup capabilities
    advertised by the class flags below.
    """

    is_ranged = True
    is_multivalued = True
    is_prefixed = True
    is_sorted_asc = True
    type = 'btree_multivalued'

    def __init__(self, schema, name=None):
        from BTrees.OOBTree import OOBTree

        UdbIndex.__init__(self, schema, name)
        self._btree = OOBTree()

    def __len__(self):
        # Number of distinct keys, not total uids.
        return len(self._btree)

    def clear(self):
        """Remove every key; returns self for chaining."""
        self._btree.clear()

        return self

    def delete(self, key, uid):
        """Detach uid from key; drop the key once its last uid is removed."""
        bucket = self._btree.get(key, EMPTY)

        if bucket != EMPTY and uid in bucket:
            if len(bucket) == 1:
                # Last member — discard the whole key entry.
                self._btree.pop(key)
            else:
                bucket.remove(uid)

        return self

    def insert(self, key, uid):
        """Attach uid to key, creating the key's uid set on first use."""
        bucket = self._btree.get(key, EMPTY)

        if bucket == EMPTY:
            self._btree.insert(key, {uid})
        else:
            # Mutate the stored set in place.
            bucket.add(uid)

        return self

    def search_by_key(self, key):
        """Yield every uid stored under exactly this key."""
        bucket = self._btree.get(key, EMPTY)

        if bucket != EMPTY:
            for uid in bucket:
                yield uid

    def search_by_key_in(self, keys):
        """Yield uids stored under any of the given keys, in key order given."""
        for key in keys:
            bucket = self._btree.get(key, EMPTY)

            if bucket != EMPTY:
                for uid in bucket:
                    yield uid

    def search_by_key_prefix(self, key):
        """Yield uids for every key that starts with the given prefix."""
        # [prefix, prefix + CHAR255] spans all keys sharing the prefix.
        for bucket in self._btree.values(key, key + CHAR255):
            for uid in bucket:
                yield uid

    def search_by_key_prefix_in(self, keys):
        """Yield uids for keys matching any of the given prefixes."""
        for prefix in keys:
            for bucket in self._btree.values(prefix, prefix + CHAR255):
                for uid in bucket:
                    yield uid

    def search_by_key_range(self, gte=None, lte=None, gte_excluded=False, lte_excluded=False):
        """Yield uids whose key lies in [gte, lte]; bounds optionally exclusive."""
        for bucket in self._btree.values(gte, lte, gte_excluded, lte_excluded):
            for uid in bucket:
                yield uid

    def upsert(self, old, new, uid):
        """Re-home uid from key `old` to key `new` (detach only when they differ)."""
        if old != new:
            prev_bucket = self._btree.get(old, EMPTY)

            if prev_bucket != EMPTY and uid in prev_bucket:
                if len(prev_bucket) == 1:
                    self._btree.pop(old)
                else:
                    prev_bucket.remove(uid)

        next_bucket = self._btree.get(new, EMPTY)

        if next_bucket == EMPTY:
            self._btree.insert(new, {uid})
        else:
            next_bucket.add(uid)

        return self