def __init__(self):
    self._quips = OOBTree()
    self._followed = OOBTree()
    self._followed_tags = OOBTree()
    # AppendStack defaults seem too low for search to make sense
    self._recent = AppendStack(max_layers=20, max_length=500)
    self._archive = Archive()
class ShortURLStorage(Persistent):
    """Stores short codes and urls to redirect to."""

    implements(IShortURLStorage)

    def __init__(self):
        self._map = OOBTree()

    def add(self, short, target):
        self._map[short] = target

    def remove(self, short):
        if self._map.has_key(short):
            del self._map[short]

    def get(self, short, default=None):
        return self._map.get(short, default)

    def suggest(self):
        try:
            key = self._map.maxKey()
        except ValueError:
            # If the tree is empty
            return 'AAAAA'
        return _increment(key)

    def __getitem__(self, key):
        return self._map.items()[key]

    def __len__(self):
        return len(self._map)
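
# A minimal, standalone sketch of the same short-code pattern with a plain
# OOBTree (hypothetical values; `_increment` above is assumed to bump a code
# to the next candidate, e.g. 'AAAAA' -> 'AAAAB').
from BTrees.OOBTree import OOBTree

codes = OOBTree()
codes['AAAAA'] = 'https://example.org/some/long/path'
codes['AAAAB'] = 'https://example.org/another/path'

# Lookups mirror ShortURLStorage.get():
assert codes.get('AAAAA') == 'https://example.org/some/long/path'
assert codes.get('ZZZZZ', None) is None

# suggest() relies on maxKey(), which a BTree provides cheaply:
try:
    highest = codes.maxKey()
except ValueError:      # empty tree
    highest = 'AAAAA'
print(highest)          # 'AAAAB' here; _increment(highest) would give the next code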
class PersitentOOBTree(Persistent):
    """A persistent wrapper around an OOBTree."""

    def __init__(self):
        self._data = OOBTree()
        Persistent.__init__(self)
        self.__len = Length()

    # Lazily (re)build the length counter for instances created before
    # self.__len existed; the mangled name matches self.__len in this class.
    @Lazy
    def _PersitentOOBTree__len(self):
        l = Length()
        ol = len(self._data)
        if ol > 0:
            l.change(ol)
        self._p_changed = True
        return l

    def __len__(self):
        return self.__len()

    def __setitem__(self, key, value):
        # make sure our lazy property gets set
        l = self.__len
        self._data[key] = value
        l.change(1)

    def __delitem__(self, key):
        # make sure our lazy property gets set
        l = self.__len
        del self._data[key]
        l.change(-1)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        """See interface `IReadContainer`."""
        return self._data[key]

    def get(self, key, default=None):
        """See interface `IReadContainer`."""
        return self._data.get(key, default)

    def __contains__(self, key):
        """See interface `IReadContainer`."""
        return key in self._data

    has_key = __contains__

    def items(self, key=None):
        return self._data.items(key)

    def keys(self, key=None):
        return self._data.keys(key)

    def values(self, key=None):
        return self._data.values(key)
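
# Why pair an OOBTree with BTrees.Length: len(tree) walks the buckets, while a
# Length object keeps a cheap, conflict-resolving counter. A rough sketch of
# the idea (not the class above; it guards against double-counting overwrites):
from BTrees.OOBTree import OOBTree
from BTrees.Length import Length

data = OOBTree()
length = Length()

def put(key, value):
    if key not in data:
        length.change(1)      # only count genuinely new keys
    data[key] = value

put('a', 1)
put('b', 2)
put('a', 3)                   # overwrite, no count change
assert length() == 2 == len(data)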
def __init__(self):
    """Setup our data structures"""
    self._anon_ratings = IOBTree()
    self._ratings = OOBTree()
    self._sessions = OOBTree()
    self._length = Length()
    self._anon_length = Length()
class RecentActivityUtility(object):
    """Utility for recent activities."""

    grok.implements(IRecentActivity)

    def __init__(self):
        self.activities = OOBTree()

    def add_activity(self, timestamp, action, user, obj, parent):
        """Add an activity to the BTree storage."""
        # NOTE: the passed-in timestamp is ignored; the current time is used.
        timestamp = int(time.time())
        activity = {'action': action,
                    'user': user,
                    'object': obj,
                    'object_url': obj.absolute_url(),
                    'parent': parent,
                    'parent_url': parent.absolute_url(),
                    }
        self.activities.insert(timestamp, activity)
        return timestamp

    def get_recent_activity(self, items=None):
        """Get the activities stored in the BTree."""
        if self.activities:
            if items:
                return sorted(self.activities.items(), reverse=True)[:items]
            else:
                return sorted(self.activities.items(), reverse=True)
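
# For context on BTree.insert(): unlike __setitem__, it refuses to overwrite
# an existing key and returns 0 instead -- so two activities recorded within
# the same second would silently collide in the class above. A quick
# illustration with an assumed epoch timestamp:
from BTrees.OOBTree import OOBTree

tree = OOBTree()
assert tree.insert(1700000000, 'first') == 1    # new key, stored
assert tree.insert(1700000000, 'second') == 0   # same key, ignored
assert tree[1700000000] == 'first'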
class BookingManagerConferenceIndex(Persistent):

    def __init__(self):
        self._tree = OOBTree()
        self._name = "bookingManagerByConf"

    def initialize(self, dbi=None):
        for i, conf in enumerate(ConferenceHolder()._getIdx().itervalues()):
            # Store CSBookingManager in the index
            csbm = getattr(conf, "_CSBookingManager", None)
            if csbm is None:
                csbm = CSBookingManager(conf)
            self.index(conf.getId(), csbm)
            if dbi and i % 1000 == 999:
                dbi.commit()
        if dbi:
            dbi.commit()

    def getName(self):
        return self._name

    def index(self, conf, csbm):
        if not self._tree.has_key(conf):
            self._tree[conf] = csbm
            self._tree._p_changed = 1

    def unindex(self, conf):
        del self._tree[conf]
        self._tree._p_changed = 1

    def get(self, conf):
        return self._tree.get(conf, None)

    def dump(self):
        return [(k, s) for k, s in self._tree.iteritems()]
class DeployedStore(Persistent):
    """Deployed entries storage."""

    implements(IDeployedStore)

    store_length = 10

    def __init__(self):
        self._entries = OOBTree()
        self.busy = False

    def __iter__(self):
        return reversed([i for i in self._entries.itervalues()])

    def add(self, date, user, action, clear, full, status, errmsg=None):
        entry = DeployedEntry(date, user, action, clear, full, status, errmsg)
        self._entries[entry.id] = entry
        if len(self._entries) > self.store_length:
            del self._entries[self._entries.minKey()]

    def remove(self, id):
        del self._entries[id]

    def get(self, id):
        return self._entries[id]

    @iteration_with_status(DEPLOYMENT_STATUS.DONE)
    def done(self):
        pass

    @iteration_with_status(DEPLOYMENT_STATUS.ERROR)
    def error(self):
        pass
def __init__(self, id, title=None):
    self._id = self.id = id
    self.title = title
    self._roles = OOBTree()
    self._principal_roles = OOBTree()
class BookingManagerConferenceIndex(Persistent):

    def __init__(self):
        self._tree = OOBTree()
        self._name = "bookingManagerByConf"

    def initialize(self, dbi=None):
        pass

    def getName(self):
        return self._name

    def index(self, conf, csbm):
        if not self._tree.has_key(conf):
            self._tree[conf] = csbm
            self._tree._p_changed = 1

    def unindex(self, conf):
        del self._tree[conf]
        self._tree._p_changed = 1

    def get(self, conf):
        return self._tree.get(conf, None)

    def dump(self):
        return [(k, s) for k, s in self._tree.iteritems()]
def __init__(self, migrate_from=None):
    if migrate_from is not None:
        self._by_address = migrate_from._by_address
        self._by_project = migrate_from._by_project
    else:
        self._by_address = OOBTree()
        self._by_project = OOBTree()
def __setitem__(self, key, value):
    checkObject(self, key, value)
    value.__parent__ = self
    value.__name__ = key
    alsoProvides(value, IContained)
    OOBTree.__setitem__(self, key, value)
    notify(events.RelationAddedEvent(value))
class Emojiable(Entity):

    def __init__(self, **kwargs):
        super(Emojiable, self).__init__(**kwargs)
        self.emojis = OOBTree()
        self.users_emoji = OOBTree()

    def add_emoji(self, emoji, user):
        user_oid = get_oid(user)
        current_emoji = self.get_user_emoji(user)
        if current_emoji:
            self.remove_emoji(current_emoji, user)
        if emoji:
            self.emojis.setdefault(emoji, PersistentList())
            self.emojis[emoji].append(user_oid)
            self.users_emoji[user_oid] = emoji

    def remove_emoji(self, emoji, user):
        user_oid = get_oid(user)
        if emoji in self.emojis and \
           user_oid in self.emojis[emoji]:
            self.emojis[emoji].remove(user_oid)
            self.users_emoji.pop(user_oid)

    def get_user_emoji(self, user):
        user_oid = get_oid(user)
        return self.users_emoji.get(user_oid, None)

    def can_add_reaction(self, user, process):
        return False
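
# The reaction bookkeeping above is just two small mappings; a condensed
# sketch of the same idea, with plain strings standing in for the object ids
# that get_oid() would return:
from BTrees.OOBTree import OOBTree
from persistent.list import PersistentList

emojis = OOBTree()        # emoji -> PersistentList of user oids
users_emoji = OOBTree()   # user oid -> emoji

def react(user_oid, emoji):
    previous = users_emoji.get(user_oid)
    if previous and user_oid in emojis.get(previous, ()):
        emojis[previous].remove(user_oid)
    emojis.setdefault(emoji, PersistentList()).append(user_oid)
    users_emoji[user_oid] = emoji

react('u1', 'thumbsup')
react('u1', 'heart')               # switching removes the old reaction
assert list(emojis['thumbsup']) == []
assert users_emoji['u1'] == 'heart'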
def objects(self):
    """Returns the data to create the sitemap."""
    catalog = getToolByName(self.context, 'portal_catalog')
    query = {}
    utils = getToolByName(self.context, 'plone_utils')
    query['portal_type'] = utils.getUserFriendlyTypes()
    ptool = getToolByName(self, 'portal_properties')
    siteProperties = getattr(ptool, 'site_properties')
    typesUseViewActionInListings = frozenset(
        siteProperties.getProperty('typesUseViewActionInListings', [])
    )
    is_plone_site_root = IPloneSiteRoot.providedBy(self.context)
    if not is_plone_site_root:
        query['path'] = '/'.join(self.context.getPhysicalPath())

    query['is_default_page'] = True
    default_page_modified = OOBTree()
    for item in catalog.searchResults(query, Language='all'):
        key = item.getURL().rsplit('/', 1)[0]
        value = (item.modified.micros(), item.modified.ISO8601())
        default_page_modified[key] = value

    # The plone site root is not catalogued.
    if is_plone_site_root:
        loc = self.context.absolute_url()
        date = self.context.modified()
        # Comparison must be on GMT value
        modified = (date.micros(), date.ISO8601())
        default_modified = default_page_modified.get(loc, None)
        if default_modified is not None:
            modified = max(modified, default_modified)
        lastmod = modified[1]
        yield {
            'loc': loc,
            'lastmod': lastmod,
            # 'changefreq': 'always',
            #     hourly/daily/weekly/monthly/yearly/never
            # 'prioriy': 0.5,  # 0.0 to 1.0
        }

    query['is_default_page'] = False
    for item in catalog.searchResults(query, Language='all'):
        loc = item.getURL()
        date = item.modified
        # Comparison must be on GMT value
        modified = (date.micros(), date.ISO8601())
        default_modified = default_page_modified.get(loc, None)
        if default_modified is not None:
            modified = max(modified, default_modified)
        lastmod = modified[1]
        if item.portal_type in typesUseViewActionInListings:
            loc += '/view'
        yield {
            'loc': loc,
            'lastmod': lastmod,
            # 'changefreq': 'always',
            #     hourly/daily/weekly/monthly/yearly/never
            # 'prioriy': 0.5,  # 0.0 to 1.0
        }
class UIDMappingStorage(Persistent):
    """Stores a mapping between remote uids and local uids."""

    implements(IUIDMappingStorage)

    def __init__(self):
        self._uidmap = OOBTree()

    def add(self, site_id, remote_uid, local_uid):
        if not site_id or not remote_uid or not local_uid:
            return
        self._uidmap[(site_id, remote_uid)] = local_uid

    def remove(self, site_id, remote_uid):
        del self._uidmap[(site_id, remote_uid)]

    def has_remote_uid(self, site_id, remote_uid):
        return bool(self._uidmap.has_key((site_id, remote_uid)))

    def get(self, site_id, remote_uid, default=None):
        return self._uidmap.get((site_id, remote_uid), default)

    def __iter__(self):
        return iter(self._uidmap)
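
# The tree above is keyed on (site_id, remote_uid) tuples; OOBTree compares
# tuples lexicographically, so all mappings for one site sit in a contiguous
# key range. A small sketch of that property, using made-up ids:
from BTrees.OOBTree import OOBTree

uidmap = OOBTree()
uidmap[('siteA', 'remote-1')] = 'local-1'
uidmap[('siteA', 'remote-2')] = 'local-2'
uidmap[('siteB', 'remote-1')] = 'local-3'

# keys(min, max) does a range search; ('siteA', '') sorts before every real
# siteA key and ('siteB', '') before every real siteB key.
site_a_keys = list(uidmap.keys(('siteA', ''), ('siteB', '')))
assert site_a_keys == [('siteA', 'remote-1'), ('siteA', 'remote-2')]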
class RuleSet(Content):
    type_title = _("Ruleset")
    _referenced_questions = frozenset()
    choice_scores = None
    nav_visible = False

    @property
    def referenced_questions(self):
        return self._referenced_questions

    @referenced_questions.setter
    def referenced_questions(self, value):
        self._referenced_questions = frozenset(value)

    def __init__(self, **kw):
        super(RuleSet, self).__init__(**kw)
        self.choice_scores = OOBTree()

    def set_choice_score(self, question, choice, score):
        q_cluster = _question_by_type_or_id(question)
        c_cluster = _choice_by_type_or_id(choice)
        assert isinstance(score, int)
        choices = self.choice_scores.setdefault(q_cluster, OIBTree())
        choices[c_cluster] = score

    def get_choice_score(self, question, choice, default=None):
        q_cluster = _question_by_type_or_id(question)
        c_cluster = _choice_by_type_or_id(choice)
        return self.choice_scores.get(q_cluster, {}).get(c_cluster, default)
class FieldTemplate(Folder):
    """Field information."""

    implements(IFieldTemplate)

    def __init__(self, title=u"", description=u"", field_type=u""):
        self._storage = OOBTree()
        super(FieldTemplate, self).__init__()
        self.title = title
        self.description = description
        self.field_type = field_type

    def get_title(self):
        return self._storage.get('title', '')

    def set_title(self, value):
        self._storage['title'] = value

    title = property(get_title, set_title)

    def get_description(self):
        return self._storage.get('description', '')

    def set_description(self, value):
        self._storage['description'] = value

    description = property(get_description, set_description)

    def get_field_type(self):
        return self._storage.get('field_type', '')

    def set_field_type(self, value):
        self._storage['field_type'] = value

    field_type = property(get_field_type, set_field_type)

    def get_node(self, context, request, name=None, **kw):
        if name is None:
            name = self.__name__
        fa = request.registry.getAdapter(self, IFieldAdapter, name=self.field_type)
        return fa.get_node(context, request, name,
                           title=self.title,
                           description=self.description,
                           **kw)
class MobileImageShortURLStorage(ShortURLStorage):
    """Utility to keep track of shortened urls for mobile images."""

    implements(IMobileImageShortURLStorage)

    def __init__(self):
        self._map = OOBTree()
        self._reverse_map = OOBTree()

    def add(self, short, target):
        self._map[short] = target
        self._reverse_map[target] = short

    def remove(self, short):
        if self._map.has_key(short):
            target = self.get(short)
            del self._map[short]
            del self._reverse_map[target]

    def get(self, short, default=None):
        return self._map.get(short, default)

    def getkey(self, url, default=None):
        return self._reverse_map.get(url, default)
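
# The two trees above are simply forward and reverse indexes that must be
# kept in sync; a compact, self-contained sketch of the same idea with plain
# OOBTrees and hypothetical helper names:
from BTrees.OOBTree import OOBTree

forward = OOBTree()   # short code -> image URL
reverse = OOBTree()   # image URL  -> short code

def link(short, url):
    forward[short] = url
    reverse[url] = short

def unlink(short):
    if short in forward:
        url = forward[short]
        del forward[short]
        if url in reverse:
            del reverse[url]

link('img1', '/images/cat.jpg')
assert reverse['/images/cat.jpg'] == 'img1'
unlink('img1')
assert 'img1' not in forward and '/images/cat.jpg' not in reverse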
def __init__(self, id, title=None, total_shib=False):
    super(ShibbolethHelper, self).__init__()
    self._id = self.id = id
    self.title = title
    self.total_shib = total_shib
    self.log(INFO, 'Initializing Shibboleth Authentication.')
    self.login_path = "login"
    self.role_mapping = PersistentMapping()
    self.log(INFO, 'Role Mapping. %s' % self.role_mapping)
    self.group_mapping = PersistentMapping()
    self.log(INFO, 'Group Mapping. %s' % self.group_mapping)
    self._mapping_map = {Constants.RoleM: self.role_mapping,
                         Constants.GroupM: self.group_mapping}
    self.__setup_compiled_func_map()
    # Shibboleth attributes store
    self.store = OOBTree()
    # Shibboleth attributes map
    self.attr_map = OOBTree()
    self.rattr_map = OOBTree()
    # Default values for the attribute map
    self.attr_map['HTTP_DISPLAYNAME'] = 'fullname'
    self.attr_map['HTTP_MAIL'] = 'email'
    self.rattr_map['fullname'] = 'HTTP_DISPLAYNAME'
    self.rattr_map['fullname_fallback'] = 'HTTP_CN'
    self.rattr_map['email'] = 'HTTP_MAIL'
    # Properties for the Property Manager.
    self.max_brackets = 6
    self.userid_attribute = 'HTTP_SHARED_TOKEN'
    self.idp_attribute = 'HTTP_SHIB_IDENTITY_PROVIDER'
    self.shibboleth_config_dir = '/etc/shibboleth'
    self.sso_url = '/Shibboleth.sso/DS'
class OfflineEventsModule(Module):
    """
    This module holds all the information needed to keep track of the creation
    process of the offline version of the events, i.e. the news items and
    other related information.
    """
    id = "offlineEvents"
    _offlineEventTypes = {"Queued": L_("Queued"),
                          "Generated": L_("Generated"),
                          "Failed": L_("Failed"),
                          "Expired": L_("Expired")}

    def __init__(self):
        self._idxConf = OOBTree()
        self._offlineEventCounter = Counter()

    def getOfflineEventIndex(self):
        return self._idxConf

    def getOfflineEventByConfId(self, confId):
        return self._idxConf.get(confId, [])

    def getOfflineEventByFileId(self, confId, fileId):
        offline_request_list = self._idxConf.get(confId, [])
        for req in offline_request_list:
            if req.id == fileId:
                return req
        return None

    def addOfflineEvent(self, offlineEvent):
        confId = offlineEvent.conference.getId()
        if not self._idxConf.has_key(confId):
            lst = []
            self._idxConf[confId] = lst
        else:
            lst = self._idxConf[confId]
        offlineEvent.id = self._offlineEventCounter.newCount()
        lst.append(offlineEvent)
        self._idxConf[confId] = lst

    def removeOfflineEvent(self, offlineEvent, del_file=False):
        if offlineEvent:
            confId = offlineEvent.conference.getId()
            lst = self._idxConf.get(confId, [])
            if offlineEvent in lst:
                lst.remove(offlineEvent)
            self._idxConf[confId] = lst
            if del_file:
                self.removeOfflineFile(offlineEvent)
        else:
            raise Exception(_("OfflineEvent does not exist"))

    def removeOfflineFile(self, offlineEvent):
        filepath = offlineEvent.file.getFilePath()
        if os.path.isfile(filepath):
            os.remove(filepath)
        offlineEvent.status = "Expired"

    @classmethod
    def getOfflineEventTypes(self):
        return OfflineEventsModule._offlineEventTypes
class XMPPPasswordStorage(Persistent):

    implements(IXMPPPasswordStorage)

    def __init__(self):
        self._passwords = OOBTree()

    def get(self, user_id):
        if user_id in self._passwords:
            return self._passwords[user_id]
        return None

    def set(self, user_id):
        password = ''.join([random.choice(chars) for i in range(12)])
        self._passwords[user_id] = password
        return password

    def remove(self, user_id):
        if user_id in self._passwords:
            del self._passwords[user_id]

    def clear(self):
        self._passwords.clear()


#class PubSubStorage(object):
#
#    implements(IPubSubStorage)
#
#    def __init__(self):
#        self.items = dict()
#        self.node_items = dict()
#        self.collections = dict()
#        self.leaf_nodes = []
#        self.publishers = dict()
#        self.comments = dict()
#
#    def itemsFromNodes(self, nodes, start=0, count=20):
#        if not isinstance(nodes, list):
#            nodes = [nodes]
#        all_items = [self.node_items[node]
#                     for node in nodes
#                     if node in self.node_items]
#        ids = sorted(itertools.chain(*all_items),
#                     key=lambda item_id: self.items[item_id]['updated'],
#                     reverse=True)
#        return [self.items[item_id] for item_id in ids[start:count + start]]
#
#    def getItemById(self, item_id):
#        return self.items.get(item_id)
#
#    def getNodeByItemId(self, item_id):
#        for node in self.leaf_nodes:
#            if item_id in self.node_items[node]:
#                return node
#
#    def getCommentsForItemId(self, item_id):
#        if item_id not in self.comments:
#            return []
#        return [self.items[iid] for iid in self.comments[item_id]]
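
# `chars` in set() above is assumed to be a module-level alphabet. A
# self-contained sketch of the same store-or-generate pattern (note that
# random.choice is not a cryptographic source; the `secrets` module would be
# preferable for real password generation):
import random
import string

from BTrees.OOBTree import OOBTree

chars = string.ascii_letters + string.digits
passwords = OOBTree()

def get_or_create(user_id):
    password = passwords.get(user_id)
    if password is None:
        password = ''.join(random.choice(chars) for _ in range(12))
        passwords[user_id] = password
    return password

first = get_or_create('alice@example.org')
assert get_or_create('alice@example.org') == first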
def __init__(self, id, title=None):
    self._id = self.id = id
    self.title = title
    self._user_passwords = OOBTree()
    self._login_to_userid = OOBTree()
    self._userid_to_login = OOBTree()
def __init__(self, **kwargs):
    super(Question, self).__init__(**kwargs)
    self.set_data(kwargs)
    self.addtoproperty('channels', Channel())
    self.selected_options = OOBTree()
    self.users_options = OOBTree()
    self.urls = PersistentDict({})
    self.len_answers = 0
class DoubleIndex(Index):

    def __init__(self, name=''):
        if name != '':
            self._name = name
        self._words = OOBTree()
        self._ids = OOBTree()

    def _addItem(self, value, item):
        if value != "":
            words = self._words
            if words.has_key(value):
                if item not in words[value]:
                    l = words[value]
                    l.append(item)
                    words[value] = l
            else:
                words[value] = [item]
            self.setIndex(words)
            id = self._itemToId(item)
            if self._ids.has_key(value):
                if id not in self._ids[value]:
                    l = self._ids[value]
                    l.append(id)
                    self._ids[value] = l
            else:
                self._ids[value] = [id]
            self._p_changed = 1

    def _withdrawItem(self, value, item):
        if self._words.has_key(value):
            if item in self._words[value]:
                words = self._words
                l = words[value]
                l.remove(item)
                words[value] = l
                self.setIndex(words)
        id = self._itemToId(item)
        if self._ids.has_key(value):
            if id in self._ids[value]:
                l = self._ids[value]
                l.remove(id)
                self._ids[value] = l
        self._p_changed = 1

    def _itemToId(self, item):
        # to be overloaded
        return ""

    def initIndex(self):
        self._words = OOBTree()
        self._ids = OOBTree()
        self._p_changed = 1

    def getLowerIndex(self):
        if self._words.keys():
            return min(self._words.keys())
        return None
def clear(self):
    """Empty the index"""
    self._length = Length()
    self._end2uid = OOBTree()
    self._start2uid = OOBTree()
    self._uid2end = IOBTree()        # Contains the index used in _end2uid
    self._uid2duration = IOBTree()   # Contains the duration
    self._uid2start = IOBTree()
    self._uid2recurrence = IOBTree()
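
# The index above pairs OOBTrees (value -> set of uids) with IOBTrees
# (integer uid -> value), so lookups work in both directions and each tree
# family matches its key type. A small sketch of that pairing; index_doc and
# the date strings are hypothetical:
from BTrees.OOBTree import OOBTree
from BTrees.IOBTree import IOBTree
from BTrees.IIBTree import IITreeSet

end2uid = OOBTree()    # end value -> IITreeSet of document ids
uid2end = IOBTree()    # document id -> end value

def index_doc(uid, end):
    uid2end[uid] = end
    uids = end2uid.get(end)
    if uids is None:
        uids = end2uid[end] = IITreeSet()
    uids.insert(uid)

index_doc(1, '2024-01-31')
index_doc(2, '2024-01-31')
assert list(end2uid['2024-01-31']) == [1, 2]
assert uid2end[1] == '2024-01-31'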
def setUp(self):
    setup.placelessSetUp()
    provideAdapter(ZodbObjectHistory)
    tree = OOBTree()
    tree[1] = 42
    tree[2] = 23
    tree[3] = 17
    state = tree.__getstate__()
    self.state = EmptyOOBTreeState(None, state, None)
class Portlet(Persistent):
    __name__ = None
    __parent__ = None
    type_name = u"Portlet"
    type_title = _(u"Portlet")
    type_description = _(u"A mini view rendered ")
    portlet_type = u""
    add_permission = "Add %s" % type_name

    def __init__(self, portlet_type, **kw):
        self.uid = unicode(uuid4())
        self.portlet_type = portlet_type
        self.__settings__ = OOBTree()
        settings = kw.pop('settings', {})
        self.settings = settings
        self.__dict__.update(**kw)
        super(Portlet, self).__init__()

    @property
    def title(self):
        return self.settings.get('title', getattr(self.portlet_adapter, 'title', u''))

    @property
    def description(self):
        return self.settings.get('description', getattr(self.portlet_adapter, 'description', u''))

    @property
    def settings(self):
        return self.__settings__

    @settings.setter
    def settings(self, value):
        self.__settings__.clear()
        self.__settings__.update(value)

    @property
    def schema_factory(self):
        return self.portlet_adapter.schema_factory

    @property
    def portlet_adapter(self):
        reg = get_current_registry()
        return reg.getAdapter(self, IPortletType, name=self.portlet_type)

    @property
    def slot(self):
        try:
            return self.__parent__.slot
        except AttributeError:
            pass

    def render(self, context, request, view, **kw):
        try:
            return self.portlet_adapter.render(context, request, view, **kw)
        except ComponentLookupError:
            _logger.error("portlet %r not found for context %r" % (self.portlet_type, context))
            return ""
def __setitem__(self, key, data):
    if key == 'display':
        OOBTree.__setitem__(self, key, data)
    else:
        text = self.setDisplayAttribute(key, data)
        item = Item(data)
        OOBTree.__setitem__(self, key, item)
        self._display[key] = text
    self._p_changed = 1
    return
def __init__(self, id, title=None, cookie_name=''):
    self._id = self.id = id
    self.title = title
    if cookie_name:
        self.cookie_name = cookie_name
    self._userid_to_uuid = OOBTree()   # userid : uuid.uuid4().hex
    self._uuid_to_time = OLBTree()     # uuid : int(time.time())
    self._uuid_to_userid = OOBTree()   # uuid.uuid4().hex : userid
def test_normal(self):
    from BTrees.OOBTree import OOBTree
    tree = OOBTree()
    for i in range(31):
        tree[i] = 2 * i
    state = tree.__getstate__()
    self.assertEqual(len(state), 2)
    self.assertEqual(len(state[0]), 3)
    self.assertEqual(state[0][1], 15)
    self._callFUT(tree)  # noraise
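
# For context on the assertions above: __getstate__() returns None for an
# empty OOBTree and a 1-tuple while everything still fits in one bucket; once
# the tree splits it pickles as a (children, firstbucket) pair, which is why
# 31 insertions give len(state) == 2 with state[0] being (bucket, key, bucket)
# and the split key at 15. A quick sketch of the simpler cases:
from BTrees.OOBTree import OOBTree

empty = OOBTree()
assert empty.__getstate__() is None     # empty trees pickle to None

one_bucket = OOBTree()
one_bucket.update({1: 2, 2: 4})
state = one_bucket.__getstate__()
assert len(state) == 1                  # still a single bucket: no firstbucket entry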
class Day:

    def __init__(self, cal, day):
        self._calendar = cal
        self._day = day
        self._confs = OOBTree()
        self._categs = []

    def _getCalendar(self):
        return self._calendar

    def addConference(self, conf, categList, tz):
        for categ in categList:
            if categ not in self._categs:
                self._categs.append(categ)
        t = conf.getStartDate().astimezone(tz).time()
        if not self._confs.has_key(t):
            self._confs[t] = set()
        self._confs[t].add(conf)

    # Sorting function that caches calculated start times for every conf.
    def _sortFunc(self, x, y):
        return cmp(self._cache[x], self._cache[y])

    def _calculateCache(self, confs):
        self._cache = {}
        for conf in confs:
            self._cache[conf] = conf.calculateDayStartTime(self._day).time()

    def getConferences(self):
        return [conf for confs in self._confs.values() for conf in confs]

    def getConferencesWithStartTime(self):
        res = [conf for confs in self._confs.values() for conf in confs]
        self._calculateCache(res)
        if res != []:
            res.sort(self._sortFunc)
        return [(event, self._cache[event]) for event in res]

    def getCategories(self):
        return self._categs

    def getWeekDay(self):
        return calendar.weekday(self._day.year,
                                self._day.month,
                                self._day.day)

    def getDayNumber(self):
        return self._day.day

    def getDate(self):
        return self._day

    def __str__(self):
        return "CalendarDay at '%s': %s --> %s" % (self._day, self._confs, self._categs)
def __init__(self):
    self._members = OOBTree()
def __init__(self, parent):
    self.__parent__ = parent
    self._fields = OOBTree()
    self._values = OOBTree()
def group(self, seq):
    sortIndex = self._sortIndex
    sortReverse = self._sortReverse
    ns = len(seq)
    ni = len(sortIndex)
    if ns >= 0.1 * ni:
        # result large compared to index -- sort via index
        handled = IISet()
        hn = 0
        _load = getattr(sortIndex, '_load', None)
        if _load is None:
            # not an optimized index
            items = sortIndex.items()
            _load = lambda (x1, x2): x2
            if sortReverse:
                items.reverse()
        elif sortReverse:
            gRO = getattr(sortIndex, 'getReverseOrder', None)
            items = gRO and gRO()
            if items is None:
                items = list(sortIndex._index.keys())
                items.reverse()
        else:
            items = sortIndex._index.keys()
        for i in items:
            ids = intersection(seq, _load(i))
            if ids:
                handled.update(ids)
                hn += len(ids)
                yield i, ids
        if hn != len(seq):
            yield None, difference(seq, handled)
    else:
        # result relatively small -- sort via result
        m = OOBTree()
        keyFor = getattr(sortIndex, 'keyForDocument', None)
        # work around "nogopip" bug: it defines "keyForDocument" as an integer
        if not callable(keyFor):
            # this will fail, when the index neither defines a reasonable
            # "keyForDocument" nor "documentToKeyMap". In this case,
            # the index cannot be used for sorting.
            keyFor = lambda doc, map=sortIndex.documentToKeyMap(): map[doc]
        noValue = IITreeSet()
        for doc in seq.keys():
            try:
                k = keyFor(doc)
            except KeyError:
                noValue.insert(doc)
                continue
            k = NaturalObjectCompare(k)
            l = m.get(k)
            if l is None:
                l = m[k] = IITreeSet()
            l.insert(doc)
        items = m.items()
        if sortReverse:
            items = list(items)
            items.reverse()
        for i in items:
            yield i
        if noValue:
            yield None, noValue
class BTreeFolder2Base(Persistent): """Base for BTree-based folders. """ security = ClassSecurityInfo() manage_options = (({ 'label': 'Contents', 'action': 'manage_main' }, ) + Folder.manage_options[1:]) security.declareProtected(view_management_screens, 'manage_main') manage_main = DTMLFile('contents', globals()) _tree = None # OOBTree: { id -> object } _count = None # A BTrees.Length _v_nextid = 0 # The integer component of the next generated ID _mt_index = None # OOBTree: { meta_type -> OIBTree: { id -> 1 } } title = '' # superValues() looks for the _objects attribute, but the implementation # would be inefficient, so superValues() support is disabled. _objects = () def __init__(self, id=None): if id is not None: self.id = id self._initBTrees() def _initBTrees(self): self._tree = OOBTree() self._count = Length() self._mt_index = OOBTree() def _populateFromFolder(self, source): """Fill this folder with the contents of another folder. """ for name in source.objectIds(): value = source._getOb(name, None) if value is not None: self._setOb(name, aq_base(value)) security.declareProtected(view_management_screens, 'manage_fixCount') def manage_fixCount(self): """Calls self._fixCount() and reports the result as text. """ old, new = self._fixCount() path = '/'.join(self.getPhysicalPath()) if old == new: return "No count mismatch detected in BTreeFolder2 at %s." % path else: return ("Fixed count mismatch in BTreeFolder2 at %s. " "Count was %d; corrected to %d" % (path, old, new)) def _fixCount(self): """Checks if the value of self._count disagrees with len(self.objectIds()). If so, corrects self._count. Returns the old and new count values. If old==new, no correction was performed. """ old = self._count() new = len(self.objectIds()) if old != new: self._count.set(new) return old, new security.declareProtected(view_management_screens, 'manage_cleanup') def manage_cleanup(self): """Calls self._cleanup() and reports the result as text. """ v = self._cleanup() path = '/'.join(self.getPhysicalPath()) if v: return "No damage detected in BTreeFolder2 at %s." % path else: return ("Fixed BTreeFolder2 at %s. " "See the log for more details." % path) def _cleanup(self): """Cleans up errors in the BTrees. Certain ZODB bugs have caused BTrees to become slightly insane. Fortunately, there is a way to clean up damaged BTrees that always seems to work: make a new BTree containing the items() of the old one. Returns 1 if no damage was detected, or 0 if damage was detected and fixed. """ from BTrees.check import check path = '/'.join(self.getPhysicalPath()) try: check(self._tree) for key in self._tree.keys(): if key not in self._tree: raise AssertionError("Missing value for key: %s" % repr(key)) check(self._mt_index) for key, value in self._mt_index.items(): if (key not in self._mt_index or self._mt_index[key] is not value): raise AssertionError( "Missing or incorrect meta_type index: %s" % repr(key)) check(value) for k in value.keys(): if k not in value: raise AssertionError( "Missing values for meta_type index: %s" % repr(key)) return 1 except AssertionError: LOG.warn('Detected damage to %s. Fixing now.' % path, exc_info=sys.exc_info()) try: self._tree = OOBTree(self._tree) mt_index = OOBTree() for key, value in self._mt_index.items(): mt_index[key] = OIBTree(value) self._mt_index = mt_index except: LOG.error('Failed to fix %s.' % path, exc_info=sys.exc_info()) raise else: LOG.info('Fixed %s.' % path) return 0 def _getOb(self, id, default=_marker): """Return the named object from the folder. 
""" try: return self._tree[id].__of__(self) except KeyError: if default is _marker: raise else: return default security.declareProtected(access_contents_information, 'get') def get(self, name, default=None): return self._getOb(name, default) def __getitem__(self, name): return self._getOb(name) def __getattr__(self, name): # Boo hoo hoo! Zope 2 prefers implicit acquisition over traversal # to subitems, and __bobo_traverse__ hooks don't work with # restrictedTraverse() unless __getattr__() is also present. # Oh well. try: return self._tree[name] except KeyError: raise AttributeError(name) def _setOb(self, id, object): """Store the named object in the folder. """ tree = self._tree if id in tree: raise KeyError('There is already an item named "%s".' % id) tree[id] = object self._count.change(1) # Update the meta type index. mti = self._mt_index meta_type = getattr(object, 'meta_type', None) if meta_type is not None: ids = mti.get(meta_type, None) if ids is None: ids = OIBTree() mti[meta_type] = ids ids[id] = 1 def _delOb(self, id): """Remove the named object from the folder. """ tree = self._tree meta_type = getattr(tree[id], 'meta_type', None) del tree[id] self._count.change(-1) # Update the meta type index. if meta_type is not None: mti = self._mt_index ids = mti.get(meta_type, None) if ids is not None and id in ids: del ids[id] if not ids: # Removed the last object of this meta_type. # Prune the index. del mti[meta_type] security.declareProtected(view_management_screens, 'getBatchObjectListing') def getBatchObjectListing(self, REQUEST=None): """Return a structure for a page template to show the list of objects. """ if REQUEST is None: REQUEST = {} pref_rows = int(REQUEST.get('dtpref_rows', 20)) b_start = int(REQUEST.get('b_start', 1)) b_count = int(REQUEST.get('b_count', 1000)) b_end = b_start + b_count - 1 url = self.absolute_url() + '/manage_main' idlist = self.objectIds() # Pre-sorted. count = self.objectCount() if b_end < count: next_url = url + '?b_start=%d' % (b_start + b_count) else: b_end = count next_url = '' if b_start > 1: prev_url = url + '?b_start=%d' % max(b_start - b_count, 1) else: prev_url = '' formatted = [] formatted.append(listtext0 % pref_rows) for i in range(b_start - 1, b_end): optID = escape(idlist[i]) formatted.append(listtext1 % (escape(optID, quote=1), optID)) formatted.append(listtext2) return { 'b_start': b_start, 'b_end': b_end, 'prev_batch_url': prev_url, 'next_batch_url': next_url, 'formatted_list': ''.join(formatted) } security.declareProtected(view_management_screens, 'manage_object_workspace') def manage_object_workspace(self, ids=(), REQUEST=None): '''Redirects to the workspace of the first object in the list.''' if ids and REQUEST is not None: REQUEST.RESPONSE.redirect('%s/%s/manage_workspace' % (self.absolute_url(), quote(ids[0]))) else: return self.manage_main(self, REQUEST) security.declareProtected(access_contents_information, 'tpValues') def tpValues(self): """Ensures the items don't show up in the left pane. """ return () security.declareProtected(access_contents_information, 'objectCount') def objectCount(self): """Returns the number of items in the folder.""" return self._count() def __len__(self): return self.objectCount() def __nonzero__(self): return True security.declareProtected(access_contents_information, 'has_key') def has_key(self, id): """Indicates whether the folder has an item by ID. 
""" return id in self._tree # backward compatibility security.declareProtected(access_contents_information, 'hasObject') hasObject = has_key security.declareProtected(access_contents_information, 'objectIds') def objectIds(self, spec=None): # Returns a list of subobject ids of the current object. # If 'spec' is specified, returns objects whose meta_type # matches 'spec'. if spec is None: return self._tree.keys() if isinstance(spec, str): spec = [spec] set = None mti = self._mt_index for meta_type in spec: ids = mti.get(meta_type, None) if ids is not None: set = union(set, ids) if set is None: return () else: return set.keys() def __contains__(self, name): return name in self._tree def __iter__(self): return iter(self.objectIds()) security.declareProtected(access_contents_information, 'objectValues') def objectValues(self, spec=None): # Returns a list of actual subobjects of the current object. # If 'spec' is specified, returns only objects whose meta_type # match 'spec'. return LazyMap(self._getOb, self.objectIds(spec)) security.declareProtected(access_contents_information, 'objectItems') def objectItems(self, spec=None): # Returns a list of (id, subobject) tuples of the current object. # If 'spec' is specified, returns only objects whose meta_type match # 'spec' return LazyMap(lambda id, _getOb=self._getOb: (id, _getOb(id)), self.objectIds(spec)) security.declareProtected(access_contents_information, 'keys', 'items', 'values') keys = objectIds values = objectValues items = objectItems security.declareProtected(access_contents_information, 'objectMap') def objectMap(self): # Returns a tuple of mappings containing subobject meta-data. return LazyMap( lambda (k, v): { 'id': k, 'meta_type': getattr(v, 'meta_type', None) }, self._tree.items(), self._count()) security.declareProtected(access_contents_information, 'objectIds_d') def objectIds_d(self, t=None): ids = self.objectIds(t) res = {} for id in ids: res[id] = 1 return res security.declareProtected(access_contents_information, 'objectMap_d') def objectMap_d(self, t=None): return self.objectMap() def _checkId(self, id, allow_dup=0): if not allow_dup and id in self: raise BadRequestException('The id "%s" is invalid--' 'it is already in use.' % id) def _setObject(self, id, object, roles=None, user=None, set_owner=1, suppress_events=False): ob = object # better name, keep original function signature v = self._checkId(id) if v is not None: id = v # If an object by the given id already exists, remove it. if id in self: self._delObject(id) if not suppress_events: notify(ObjectWillBeAddedEvent(ob, self, id)) self._setOb(id, ob) ob = self._getOb(id) if set_owner: # TODO: eventify manage_fixupOwnershipAfterAdd # This will be called for a copy/clone, or a normal _setObject. ob.manage_fixupOwnershipAfterAdd() # Try to give user the local role "Owner", but only if # no local roles have been set on the object yet. 
if getattr(ob, '__ac_local_roles__', _marker) is None: user = getSecurityManager().getUser() if user is not None: userid = user.getId() if userid is not None: ob.manage_setLocalRoles(userid, ['Owner']) if not suppress_events: notify(ObjectAddedEvent(ob, self, id)) notifyContainerModified(self) compatibilityCall('manage_afterAdd', ob, ob, self) return id def __setitem__(self, key, value): return self._setObject(key, value) def _delObject(self, id, dp=1, suppress_events=False): ob = self._getOb(id) compatibilityCall('manage_beforeDelete', ob, ob, self) if not suppress_events: notify(ObjectWillBeRemovedEvent(ob, self, id)) self._delOb(id) if not suppress_events: notify(ObjectRemovedEvent(ob, self, id)) notifyContainerModified(self) def __delitem__(self, name): return self._delObject(id=name) # Utility for generating unique IDs. security.declareProtected(access_contents_information, 'generateId') def generateId(self, prefix='item', suffix='', rand_ceiling=999999999): """Returns an ID not used yet by this folder. The ID is unlikely to collide with other threads and clients. The IDs are sequential to optimize access to objects that are likely to have some relation. """ tree = self._tree n = self._v_nextid attempt = 0 while 1: if n % 4000 != 0 and n <= rand_ceiling: id = '%s%d%s' % (prefix, n, suffix) if id not in tree: break n = randint(1, rand_ceiling) attempt = attempt + 1 if attempt > MAX_UNIQUEID_ATTEMPTS: # Prevent denial of service raise ExhaustedUniqueIdsError self._v_nextid = n + 1 return id
class _Records(object): """The records stored in the registry. This implements dict-like access to records, where as the Registry object implements dict-like read-only access to values. """ __parent__ = None # Similar to zope.schema._field._isdotted, but allows up to one '/' _validkey = re.compile(r"([a-zA-Z][a-zA-Z0-9_-]*)" r"([.][a-zA-Z][a-zA-Z0-9_-]*)*" r"([/][a-zA-Z][a-zA-Z0-9_-]*)?" r"([.][a-zA-Z][a-zA-Z0-9_-]*)*" # use the whole line r"$").match def __init__(self, parent): self.__parent__ = parent self._fields = OOBTree() self._values = OOBTree() def __setitem__(self, name, record): if not self._validkey(name): raise InvalidRegistryKey(record) if not IRecord.providedBy(record): raise ValueError("Value must be a record") self._setField(name, record.field) self._values[name] = record.value record.__name__ = name record.__parent__ = self.__parent__ notify(RecordAddedEvent(record)) def __delitem__(self, name): record = self[name] # unbind the record so that it won't attempt to look up values from # the registry anymore record.__parent__ = None del self._fields[name] del self._values[name] notify(RecordRemovedEvent(record)) def __getitem__(self, name): field = self._getField(name) value = self._values[name] record = Record(field, value, _validate=False) record.__name__ = name record.__parent__ = self.__parent__ return record def get(self, name, default=None): try: return self[name] except KeyError: return default def __nonzero__(self): return self._values.__nonzero__() def __len__(self): return self._values.__len__() def __iter__(self): return self._values.__iter__() def has_key(self, name): return self._values.__contains__(name) def __contains__(self, name): return self._values.__contains__(name) def keys(self, min=None, max=None): return self._values.keys(min, max) def maxKey(self, key=None): return self._values.maxKey(key) def minKey(self, key=None): return self._values.minKey(key) def values(self, min=None, max=None): return [self[name] for name in self.keys(min, max)] def items(self, min=None, max=None): return [( name, self[name], ) for name in self.keys(min, max)] def setdefault(self, key, value): if key not in self: self[key] = value return self[key] def clear(self): self._fields.clear() self._values.clear() # Helper methods def _getField(self, name): field = self._fields[name] # Handle field reference pointers if isinstance(field, basestring): recordName = field while isinstance(field, basestring): recordName = field field = self._fields[recordName] field = FieldRef(recordName, field) return field def _setField(self, name, field): if not IPersistentField.providedBy(field): raise ValueError("The record's field must be an IPersistentField.") if IFieldRef.providedBy(field): if field.recordName not in self._fields: raise ValueError( "Field reference points to non-existent record") self._fields[name] = field.recordName # a pointer, of sorts else: field.__name__ = 'value' self._fields[name] = field
def set_to_oobtree_if_not_set(root, path: str) -> None:
    if not hasattr(root, path):
        setattr(root, path, OOBTree())
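
# Typical use against a ZODB root object: the attribute is created once and
# reused on later runs. A sketch with a stand-in object instead of a real
# connection root:
class _FakeRoot(object):
    """Stand-in for a ZODB root, just for illustration."""

root = _FakeRoot()
set_to_oobtree_if_not_set(root, 'users')
set_to_oobtree_if_not_set(root, 'users')    # second call is a no-op
root.users['alice'] = {'email': 'alice@example.org'}
assert len(root.users) == 1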
def initBucketTree(self):
    """Initialize the Bucket Tree."""
    self._tree = OOBTree()
elif func == 'Btree':
    '''
    Function: create a btree index for a column of a certain table
    Input: table - table to perform operations on;
           table name (string) - name of the table;
           col (string) - column to perform operations on
    Output: None
    Effect on globals: btree, btree_index
    Reference: BTrees
    '''
    start_time = time.time()
    tablename_opera, column_opera = param.split(",")
    column_index = expre[tablename_opera][0].index(column_opera)
    table_opera = expre[tablename_opera]
    global btree_index
    tree = OOBTree()  # build new tree
    tree.update({-1: table_opera[0]})
    for line in table_opera[1:]:
        k = line[column_index]
        while list(tree.values(k, k)):
            random.seed(k)
            k = k + random.randint(100, 1000)
        tree.update({k: line})
    end_time = time.time()
    print("Time used: ", end_time - start_time, "s")
elif func == 'Hash':
    '''
    Function: create a hash index for a column of a certain table
    Input: table - table to perform operations on;
           table name (string) - name of the table;
           col (string) - column to perform operations on
def __init__(self, context):
    self.context = context
    annot = IAnnotations(context)
    self.listen_annot = annot.setdefault(PROJECTNAME, OOBTree())
    self.migration_annot = self.listen_annot.setdefault(
        'migration', PersistentList())
def __init__(self):
    self._requests = OOBTree()
class MemberDataTool(UniqueObject, SimpleItem, PropertyManager, ActionProviderBase): """ This tool wraps user objects, making them act as Member objects. """ __implements__ = (IMemberDataTool, ActionProviderBase.__implements__) id = 'portal_memberdata' meta_type = 'CMF Member Data Tool' _actions = () _v_temps = None _properties = () security = ClassSecurityInfo() manage_options = (ActionProviderBase.manage_options + ({ 'label': 'Overview', 'action': 'manage_overview' }, { 'label': 'Contents', 'action': 'manage_showContents' }) + PropertyManager.manage_options + SimpleItem.manage_options) # # ZMI methods # security.declareProtected(ManagePortal, 'manage_overview') manage_overview = DTMLFile('explainMemberDataTool', _dtmldir) security.declareProtected(ViewManagementScreens, 'manage_showContents') manage_showContents = DTMLFile('memberdataContents', _dtmldir) def __init__(self): self._members = OOBTree() # Create the default properties. self._setProperty('email', '', 'string') self._setProperty('portal_skin', '', 'string') self._setProperty('listed', '', 'boolean') self._setProperty('login_time', '2000/01/01', 'date') self._setProperty('last_login_time', '2000/01/01', 'date') # # 'portal_memberdata' interface methods # security.declarePrivate('getMemberDataContents') def getMemberDataContents(self): ''' Return the number of members stored in the _members BTree and some other useful info ''' membertool = getToolByName(self, 'portal_membership') members = self._members user_list = membertool.listMemberIds() member_list = members.keys() member_count = len(members) orphan_count = 0 for member in member_list: if member not in user_list: orphan_count = orphan_count + 1 return [{'member_count': member_count, 'orphan_count': orphan_count}] security.declarePrivate('searchMemberData') def searchMemberData(self, search_param, search_term, attributes=()): """ Search members. """ res = [] if not search_param: return res membership = getToolByName(self, 'portal_membership') if len(attributes) == 0: attributes = ('id', 'email') if search_param == 'username': search_param = 'id' for user_id in self._members.keys(): u = membership.getMemberById(user_id) if u is not None: memberProperty = u.getProperty searched = memberProperty(search_param, None) if searched is not None and searched.find(search_term) != -1: user_data = {} for desired in attributes: if desired == 'id': user_data['username'] = memberProperty(desired, '') else: user_data[desired] = memberProperty(desired, '') res.append(user_data) return res security.declarePrivate('searchMemberDataContents') def searchMemberDataContents(self, search_param, search_term): """ Search members. This method will be deprecated soon. """ res = [] if search_param == 'username': search_param = 'id' mtool = getToolByName(self, 'portal_membership') for member_id in self._members.keys(): user_wrapper = mtool.getMemberById(member_id) if user_wrapper is not None: memberProperty = user_wrapper.getProperty searched = memberProperty(search_param, None) if searched is not None and searched.find(search_term) != -1: res.append({ 'username': memberProperty('id'), 'email': memberProperty('email', '') }) return res security.declarePrivate('pruneMemberDataContents') def pruneMemberDataContents(self): """ Delete data contents of all members not listet in acl_users. 
""" membertool = getToolByName(self, 'portal_membership') members = self._members user_list = membertool.listMemberIds() for tuple in members.items(): member_name = tuple[0] member_obj = tuple[1] if member_name not in user_list: del members[member_name] security.declarePrivate('wrapUser') def wrapUser(self, u): ''' If possible, returns the Member object that corresponds to the given User object. ''' id = u.getId() members = self._members if not members.has_key(id): # Get a temporary member that might be # registered later via registerMemberData(). temps = self._v_temps if temps is not None and temps.has_key(id): m = temps[id] else: base = aq_base(self) m = MemberData(base, id) if temps is None: self._v_temps = {id: m} if hasattr(self, 'REQUEST'): # No REQUEST during tests. self.REQUEST._hold(CleanupTemp(self)) else: temps[id] = m else: m = members[id] # Return a wrapper with self as containment and # the user as context. return m.__of__(self).__of__(u) security.declarePrivate('registerMemberData') def registerMemberData(self, m, id): """ Add the given member data to the _members btree. """ self._members[id] = aq_base(m) security.declarePrivate('deleteMemberData') def deleteMemberData(self, member_id): """ Delete member data of specified member. """ members = self._members if members.has_key(member_id): del members[member_id] return 1 else: return 0
class PasswordResetTool(UniqueObject, SimpleItem): """Provides a default implementation for a password reset scheme. From a 'forgotten password' template, you submit your username to a handler script that does a 'requestReset', and sends an email with an unguessable unique hash in a url as built by 'constructURL' to the user. The user visits that URL (the 'reset form') and enters their username, """ id = 'portal_password_reset' meta_type = 'Password Reset Tool' security = ClassSecurityInfo() def __init__(self): self._requests = OOBTree() # Internal attributes _user_check = True _timedelta = 7 # DAYS # Interface fulfillment ## @security.protected(ManagePortal) def requestReset(self, userid): """Ask the system to start the password reset procedure for user 'userid'. Returns a dictionary with the random string that must be used to reset the password in 'randomstring', the expiration date as a datetime in 'expires', and the userid (for convenience) in 'userid'. Returns None if no such user. """ if not self.getValidUser(userid): return None randomstring = self.uniqueString(userid) expiry = self.expirationDate() self._requests[randomstring] = (userid, expiry) # clear out untouched records more than 10 days old self.clearExpired(10) # this is a cheap sort of "automatic" clearing self._p_changed = 1 retval = {} retval['randomstring'] = randomstring retval['expires'] = expiry retval['userid'] = userid return retval @security.public def resetPassword(self, userid, randomstring, password): """Set the password (in 'password') for the user who maps to the string in 'randomstring' iff the entered 'userid' is equal to the mapped userid. (This can be turned off with the 'toggleUserCheck' method.) Note that this method will *not* check password validity: this must be done by the caller. Throws an 'ExpiredRequestError' if request is expired. Throws an 'InvalidRequestError' if no such record exists, or 'userid' is not in the record. """ if get_member_by_login_name: found_member = get_member_by_login_name(self, userid, raise_exceptions=False) if found_member is not None: userid = found_member.getId() try: stored_user, expiry = self._requests[randomstring] except KeyError: raise InvalidRequestError if self.checkUser() and (userid != stored_user): raise InvalidRequestError if self.expired(expiry): del self._requests[randomstring] self._p_changed = 1 raise ExpiredRequestError member = self.getValidUser(stored_user) if not member: raise InvalidRequestError # actually change password user = member.getUser() uf = getToolByName(self, 'acl_users') uf.userSetPassword(user.getUserId(), password) member.setMemberProperties(dict(must_change_password=0)) # clean out the request del self._requests[randomstring] self._p_changed = 1 @security.protected(ManagePortal) def setExpirationTimeout(self, timedelta): """Set the length of time a reset request will be valid in days. """ self._timedelta = abs(timedelta) @security.public def getExpirationTimeout(self): """Get the length of time a reset request will be valid. """ return self._timedelta @security.public def checkUser(self): """Returns a boolean representing the state of 'user check' as described in 'toggleUserCheck'. True means on, and is the default.""" return self._user_check @security.public def verifyKey(self, key): """Verify a key. Raises an exception if the key is invalid or expired. 
""" try: u, expiry = self._requests[key] except KeyError: raise InvalidRequestError if self.expired(expiry): raise ExpiredRequestError if not self.getValidUser(u): raise InvalidRequestError('No such user') @security.private def clearExpired(self, days=0): """Destroys all expired reset request records. Parameter controls how many days past expired it must be to disappear. """ now = datetime.datetime.utcnow() for key, record in self._requests.items(): stored_user, expiry = record if self.expired(expiry, now - datetime.timedelta(days=days)): del self._requests[key] self._p_changed = 1 # customization points @security.private def uniqueString(self, userid): """Returns a string that is random and unguessable, or at least as close as possible. This is used by 'requestReset' to generate the auth string. Override if you wish different format. This implementation ignores userid and simply generates a UUID. That parameter is for convenience of extenders, and will be passed properly in the default implementation. """ uuid_generator = getUtility(IUUIDGenerator) return uuid_generator() @security.private def expirationDate(self): """Returns a DateTime for exipiry of a request from the current time. This is used by housekeeping methods (like clearEpired) and stored in reset request records.""" return (datetime.datetime.utcnow() + datetime.timedelta(days=self._timedelta)) @security.private def getValidUser(self, userid): """Returns the member with 'userid' if available and None otherwise.""" if get_member_by_login_name: registry = getUtility(IRegistry) settings = registry.forInterface(ISecuritySchema, prefix='plone') if settings.use_email_as_login: return get_member_by_login_name(self, userid, raise_exceptions=False) membertool = getToolByName(self, 'portal_membership') return membertool.getMemberById(userid) @security.private def expired(self, dt, now=None): """Tells whether a DateTime or timestamp 'datetime' is expired with regards to either 'now', if provided, or the current time.""" if not now: now = datetime.datetime.utcnow() return now >= dt
class ZODBRoleManager(BasePlugin): """ PAS plugin for managing roles in the ZODB. """ meta_type = 'ZODB Role Manager' security = ClassSecurityInfo() def __init__(self, id, title=None): self._id = self.id = id self.title = title self._roles = OOBTree() self._principal_roles = OOBTree() def manage_afterAdd(self, item, container): if item is self: role_holder = aq_parent(aq_inner(container)) for role in getattr(role_holder, '__ac_roles__', ()): try: if role not in ('Anonymous', 'Authenticated'): self.addRole(role) except KeyError: pass if 'Manager' not in self._roles: self.addRole('Manager') # # IRolesPlugin implementation # security.declarePrivate('getRolesForPrincipal') def getRolesForPrincipal(self, principal, request=None): """ See IRolesPlugin. """ result = list(self._principal_roles.get(principal.getId(), ())) getGroups = getattr(principal, 'getGroups', lambda x: ()) for group_id in getGroups(): result.extend(self._principal_roles.get(group_id, ())) return tuple(result) # # IRoleEnumerationPlugin implementation # def enumerateRoles(self, id=None, exact_match=False, sort_by=None, max_results=None, **kw): """ See IRoleEnumerationPlugin. """ role_info = [] role_ids = [] plugin_id = self.getId() if isinstance(id, str): id = [id] if exact_match and (id): role_ids.extend(id) if role_ids: role_filter = None else: # Searching role_ids = self.listRoleIds() role_filter = _ZODBRoleFilter(id, **kw) for role_id in role_ids: if self._roles.get(role_id): e_url = '%s/manage_roles' % self.getId() p_qs = 'role_id=%s' % role_id m_qs = 'role_id=%s&assign=1' % role_id info = {} info.update(self._roles[role_id]) info['pluginid'] = plugin_id info['properties_url'] = '%s?%s' % (e_url, p_qs) info['members_url'] = '%s?%s' % (e_url, m_qs) if not role_filter or role_filter(info): role_info.append(info) return tuple(role_info) # # IRoleAssignerPlugin implementation # security.declarePrivate('doAssignRoleToPrincipal') def doAssignRoleToPrincipal(self, principal_id, role): return self.assignRoleToPrincipal(role, principal_id) security.declarePrivate('doRemoveRoleFromPrincipal') def doRemoveRoleFromPrincipal(self, principal_id, role): return self.removeRoleFromPrincipal(role, principal_id) # # Role management API # security.declareProtected(ManageUsers, 'listRoleIds') def listRoleIds(self): """ Return a list of the role IDs managed by this object. """ return self._roles.keys() security.declareProtected(ManageUsers, 'listRoleInfo') def listRoleInfo(self): """ Return a list of the role mappings. """ return self._roles.values() security.declareProtected(ManageUsers, 'getRoleInfo') def getRoleInfo(self, role_id): """ Return a role mapping. """ return self._roles[role_id] security.declareProtected(ManageUsers, 'addRole') def addRole(self, role_id, title='', description=''): """ Add 'role_id' to the list of roles managed by this object. o Raise KeyError on duplicate. """ if self._roles.get(role_id) is not None: raise KeyError, 'Duplicate role: %s' % role_id self._roles[role_id] = { 'id': role_id, 'title': title, 'description': description } security.declareProtected(ManageUsers, 'updateRole') def updateRole(self, role_id, title, description): """ Update title and description for the role. o Raise KeyError if not found. """ self._roles[role_id].update({ 'title': title, 'description': description }) security.declareProtected(ManageUsers, 'removeRole') def removeRole(self, role_id, REQUEST=None): """ Remove 'role_id' from the list of roles managed by this object. o Raise KeyError if not found. 
""" for principal_id in self._principal_roles.keys(): self.removeRoleFromPrincipal(role_id, principal_id) del self._roles[role_id] removeRole = postonly(removeRole) # # Role assignment API # security.declareProtected(ManageUsers, 'listAvailablePrincipals') def listAvailablePrincipals(self, role_id, search_id): """ Return a list of principal IDs to whom a role can be assigned. o If supplied, 'search_id' constrains the principal IDs; if not, return empty list. o Omit principals with existing assignments. """ result = [] if search_id: # don't bother searching if no criteria parent = aq_parent(self) for info in parent.searchPrincipals(max_results=20, sort_by='id', id=search_id, exact_match=False): id = info['id'] title = info.get('title', id) if (role_id not in self._principal_roles.get(id, ()) and role_id != id): result.append((id, title)) return result security.declareProtected(ManageUsers, 'listAssignedPrincipals') def listAssignedPrincipals(self, role_id): """ Return a list of principal IDs to whom a role is assigned. """ result = [] for k, v in self._principal_roles.items(): if role_id in v: # should be at most one and only one mapping to 'k' parent = aq_parent(self) info = parent.searchPrincipals(id=k, exact_match=True) if len(info) > 1: LOG.error( 'searchPrincipals() returned more than one result ' 'for id=%s' % k) assert len(info) in (0, 1) if len(info) == 0: title = '<%s: not found>' % k else: title = info[0].get('title', k) result.append((k, title)) return result security.declareProtected(ManageUsers, 'assignRoleToPrincipal') def assignRoleToPrincipal(self, role_id, principal_id, REQUEST=None): """ Assign a role to a principal (user or group). o Return a boolean indicating whether a new assignment was created. o Raise KeyError if 'role_id' is unknown. """ role_info = self._roles[role_id] # raise KeyError if unknown! current = self._principal_roles.get(principal_id, ()) already = role_id in current if not already: new = current + (role_id, ) self._principal_roles[principal_id] = new self._invalidatePrincipalCache(principal_id) return not already assignRoleToPrincipal = postonly(assignRoleToPrincipal) security.declareProtected(ManageUsers, 'removeRoleFromPrincipal') def removeRoleFromPrincipal(self, role_id, principal_id, REQUEST=None): """ Remove a role from a principal (user or group). o Return a boolean indicating whether the role was already present. o Raise KeyError if 'role_id' is unknown. o Ignore requests to remove a role not already assigned to the principal. """ role_info = self._roles[role_id] # raise KeyError if unknown! current = self._principal_roles.get(principal_id, ()) new = tuple([x for x in current if x != role_id]) already = current != new if already: self._principal_roles[principal_id] = new self._invalidatePrincipalCache(principal_id) return already removeRoleFromPrincipal = postonly(removeRoleFromPrincipal) # # ZMI # manage_options = (({ 'label': 'Roles', 'action': 'manage_roles', }, ) + BasePlugin.manage_options) security.declareProtected(ManageUsers, 'manage_roles') manage_roles = PageTemplateFile('www/zrRoles', globals(), __name__='manage_roles') security.declareProtected(ManageUsers, 'manage_twoLists') manage_twoLists = PageTemplateFile('../www/two_lists', globals(), __name__='manage_twoLists') security.declareProtected(ManageUsers, 'manage_addRole') def manage_addRole(self, role_id, title, description, RESPONSE): """ Add a role via the ZMI. 
""" self.addRole(role_id, title, description) message = 'Role+added' RESPONSE.redirect('%s/manage_roles?manage_tabs_message=%s' % (self.absolute_url(), message)) security.declareProtected(ManageUsers, 'manage_updateRole') def manage_updateRole(self, role_id, title, description, RESPONSE): """ Update a role via the ZMI. """ self.updateRole(role_id, title, description) message = 'Role+updated' RESPONSE.redirect('%s/manage_roles?role_id=%s&manage_tabs_message=%s' % (self.absolute_url(), role_id, message)) security.declareProtected(ManageUsers, 'manage_removeRoles') def manage_removeRoles(self, role_ids, RESPONSE, REQUEST=None): """ Remove one or more roles via the ZMI. """ role_ids = filter(None, role_ids) if not role_ids: message = 'no+roles+selected' else: for role_id in role_ids: self.removeRole(role_id) message = 'Roles+removed' RESPONSE.redirect('%s/manage_roles?manage_tabs_message=%s' % (self.absolute_url(), message)) manage_removeRoles = postonly(manage_removeRoles) security.declareProtected(ManageUsers, 'manage_assignRoleToPrincipals') def manage_assignRoleToPrincipals(self, role_id, principal_ids, RESPONSE, REQUEST=None): """ Assign a role to one or more principals via the ZMI. """ assigned = [] for principal_id in principal_ids: if self.assignRoleToPrincipal(role_id, principal_id): assigned.append(principal_id) if not assigned: message = 'Role+%s+already+assigned+to+all+principals' % role_id else: message = 'Role+%s+assigned+to+%s' % (role_id, '+'.join(assigned)) RESPONSE.redirect( ('%s/manage_roles?role_id=%s&assign=1' + '&manage_tabs_message=%s') % (self.absolute_url(), role_id, message)) manage_assignRoleToPrincipals = postonly(manage_assignRoleToPrincipals) security.declareProtected(ManageUsers, 'manage_removeRoleFromPrincipals') def manage_removeRoleFromPrincipals(self, role_id, principal_ids, RESPONSE, REQUEST=None): """ Remove a role from one or more principals via the ZMI. """ removed = [] for principal_id in principal_ids: if self.removeRoleFromPrincipal(role_id, principal_id): removed.append(principal_id) if not removed: message = 'Role+%s+alread+removed+from+all+principals' % role_id else: message = 'Role+%s+removed+from+%s' % (role_id, '+'.join(removed)) RESPONSE.redirect( ('%s/manage_roles?role_id=%s&assign=1' + '&manage_tabs_message=%s') % (self.absolute_url(), role_id, message)) manage_removeRoleFromPrincipals = postonly(manage_removeRoleFromPrincipals)
def update(self, language, items, clear=False): self._fixup() tree = self.data.setdefault(language, OOBTree()) if clear: tree.clear() # A new tree always uses the newest version. if not tree: version = self.version[language] = 2 else: version = self.version.get(language, 1) order = self.order.setdefault(language, IOBTree()) count = self.count.get(language, 0) if clear: order.clear() count = 0 # Always migrate to newest version. if version == 1: def fix(path): return path.replace(LEGACY_PATH_SEPARATOR, PATH_SEPARATOR) for i in list(order): path = order[i] order[i] = fix(path) for path in list(tree): value = tree.pop(path) tree[fix(path)] = value version = self.version[language] = 2 logger.info( "Taxonomy '%s' upgraded to version %d for language '%s'." % ( self.name, version, language ) ) # Make sure we update the modification time. self.data[language] = tree # The following structure is used to expunge updated entries. inv = {} if not clear: for i, key in order.items(): inv[key] = i seen = set() for key, value in items: if key in seen: logger.warning("Duplicate key entry: %r" % (key, )) seen.add(key) update = key in tree tree[key] = value order[count] = key count += 1 # If we're updating, then we have to pop out the old ordering # information in order to maintain relative ordering of new items. if update: i = inv.get(key) if i is not None: del order[i] self.count[language] = count
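# Standalone sketch of the ordering scheme used by update() above: values in an
# OOBTree keyed by path, insertion order in an IOBTree keyed by a running
# counter, plus an inverse map so a re-added key drops its stale order slot.
# The names here (put, inv) are illustrative, not the utility's API.
from BTrees.OOBTree import OOBTree
from BTrees.IOBTree import IOBTree

tree, order, inv, count = OOBTree(), IOBTree(), {}, 0

def put(key, value):
    global count
    if key in tree and key in inv:
        del order[inv[key]]        # keep only the newest position for the key
    tree[key] = value
    order[count] = key
    inv[key] = count
    count += 1

put('/animals', 1)
put('/plants', 2)
put('/animals', 3)                 # re-added, so it moves to the end
assert [order[i] for i in order] == ['/plants', '/animals']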
class MemberDataTool(UniqueObject, SimpleItem, PropertyManager): """ This tool wraps user objects, making them act as Member objects. """ id = 'portal_memberdata' meta_type = 'CMF Member Data Tool' _properties = ( { 'id': 'email', 'type': 'string', 'mode': 'w' }, { 'id': 'portal_skin', 'type': 'string', 'mode': 'w' }, { 'id': 'listed', 'type': 'boolean', 'mode': 'w' }, { 'id': 'login_time', 'type': 'date', 'mode': 'w' }, { 'id': 'last_login_time', 'type': 'date', 'mode': 'w' }, { 'id': 'fullname', 'type': 'string', 'mode': 'w' }, ) email = '' fullname = '' last_login_time = DateTime('1970/01/01 00:00:00 UTC') # epoch listed = False login_time = DateTime('1970/01/01 00:00:00 UTC') # epoch portal_skin = '' security = ClassSecurityInfo() manage_options = (({ 'label': 'Overview', 'action': 'manage_overview' }, { 'label': 'Contents', 'action': 'manage_showContents' }) + PropertyManager.manage_options + SimpleItem.manage_options) # # ZMI methods # security.declareProtected(ManagePortal, 'manage_overview') manage_overview = DTMLFile('explainMemberDataTool', _dtmldir) security.declareProtected(ViewManagementScreens, 'manage_showContents') manage_showContents = DTMLFile('memberdataContents', _dtmldir) def __init__(self): self._members = OOBTree() # # 'portal_memberdata' interface methods # @security.private def getMemberDataContents(self): ''' Return the number of members stored in the _members BTree and some other useful info ''' mtool = getUtility(IMembershipTool) members = self._members user_list = mtool.listMemberIds() member_list = members.keys() member_count = len(members) orphan_count = 0 for member in member_list: if member not in user_list: orphan_count = orphan_count + 1 return [{'member_count': member_count, 'orphan_count': orphan_count}] @security.private def searchMemberData(self, search_param, search_term, attributes=()): """ Search members. """ res = [] if not search_param: return res mtool = getUtility(IMembershipTool) if len(attributes) == 0: attributes = ('id', 'email') if search_param == 'username': search_param = 'id' for user_id in self._members.keys(): u = mtool.getMemberById(user_id) if u is not None: memberProperty = u.getProperty searched = memberProperty(search_param, None) if searched is not None and searched.find(search_term) != -1: user_data = {} for desired in attributes: if desired == 'id': user_data['username'] = memberProperty(desired, '') else: user_data[desired] = memberProperty(desired, '') res.append(user_data) return res @security.private def searchMemberDataContents(self, search_param, search_term): """ Search members. This method will be deprecated soon. """ res = [] if search_param == 'username': search_param = 'id' mtool = getUtility(IMembershipTool) for member_id in self._members.keys(): user_wrapper = mtool.getMemberById(member_id) if user_wrapper is not None: memberProperty = user_wrapper.getProperty searched = memberProperty(search_param, None) if searched is not None and searched.find(search_term) != -1: res.append({ 'username': memberProperty('id'), 'email': memberProperty('email', '') }) return res @security.private def pruneMemberDataContents(self): """ Delete data contents of all members not listet in acl_users. """ mtool = getUtility(IMembershipTool) members = self._members user_list = mtool.listMemberIds() for member_id in list(members.keys()): if member_id not in user_list: del members[member_id] @security.private def wrapUser(self, u): ''' If possible, returns the Member object that corresponds to the given User object. 
''' return getMultiAdapter((u, self), IMember) @security.private def registerMemberData(self, m, id): """ Add the given member data to the _members btree. """ self._members[id] = aq_base(m) @security.private def deleteMemberData(self, member_id): """ Delete member data of specified member. """ members = self._members if member_id in members: del members[member_id] return 1 else: return 0
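# Sketch of the prune logic above with a plain OOBTree: member-data records
# whose ids no longer appear in the user folder are deleted. The ids and the
# record layout are illustrative.
from BTrees.OOBTree import OOBTree

members = OOBTree()
members.update({'alice': {'email': 'alice@example.org'},
                'ghost': {'email': 'ghost@example.org'}})
current_user_ids = ['alice']

for member_id in list(members.keys()):   # copy keys so we can delete while iterating
    if member_id not in current_user_ids:
        del members[member_id]

assert list(members.keys()) == ['alice']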
def __init__(self): self.data = OOBTree()
def _initBTrees(self): self._tree = OOBTree() self._count = Length() self._mt_index = OOBTree()
class ChallengeProtocolChooser(BasePlugin): """ PAS plugin for choosing challenger protocol based on request """ meta_type = 'Challenge Protocol Chooser Plugin' security = ClassSecurityInfo() manage_options = (({ 'label': 'Mapping', 'action': 'manage_editProtocolMapping' }, ) + BasePlugin.manage_options) def __init__(self, id, title=None, mapping=None): self._id = self.id = id self.title = title self._map = OOBTree() if mapping is not None: self.manage_updateProtocolMapping(mapping=mapping) security.declarePrivate('chooseProtocols') def chooseProtocols(self, request): pas_instance = self._getPAS() plugins = pas_instance._getOb('plugins') sniffers = plugins.listPlugins(IRequestTypeSniffer) for sniffer_id, sniffer in sniffers: request_type = sniffer.sniffRequestType(request) if request_type is not None: return self._getProtocolsFor(request_type) def _getProtocolsFor(self, request_type): label = _request_type_bmap.get(request_type, None) if label is None: return return self._map.get(label, None) def _listProtocols(self): pas_instance = self._getPAS() plugins = pas_instance._getOb('plugins') challengers = plugins.listPlugins(IChallengePlugin) found = [] for challenger_id, challenger in challengers: protocol = getattr(challenger, 'protocol', challenger_id) if protocol not in found: found.append(protocol) return found manage_editProtocolMappingForm = PageTemplateFile( 'www/cpcEdit', globals(), __name__='manage_editProtocolMappingForm') def manage_editProtocolMapping(self, REQUEST=None): """ Edit Protocol Mapping """ info = [] available_protocols = self._listProtocols() request_types = listRequestTypesLabels() request_types.sort() for label in request_types: settings = [] select_any = False info.append({'label': label, 'settings': settings}) protocols = self._map.get(label, None) if not protocols: select_any = True for protocol in available_protocols: selected = False if protocols and protocol in protocols: selected = True settings.append({ 'label': protocol, 'selected': selected, 'value': protocol, }) settings.insert(0, { 'label': '(any)', 'selected': select_any, 'value': '', }) return self.manage_editProtocolMappingForm(info=info, REQUEST=REQUEST) def manage_updateProtocolMapping(self, mapping, REQUEST=None): """ Update mapping of Request Type to Protocols """ for key, value in mapping.items(): value = filter(None, value) if not value: if key in self._map: del self._map[key] else: self._map[key] = value if REQUEST is not None: REQUEST['RESPONSE'].redirect('%s/manage_editProtocolMapping' '?manage_tabs_message=' 'Protocol+Mappings+Changed.' % self.absolute_url())
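# Sketch of the mapping-update rule in manage_updateProtocolMapping() above: an
# empty protocol selection removes the request type from the OOBTree (meaning
# "any protocol"), anything else replaces the stored list. The labels are examples.
from BTrees.OOBTree import OOBTree

protocol_map = OOBTree()

def update_mapping(mapping):
    for key, value in mapping.items():
        value = [v for v in value if v]          # same effect as filter(None, value)
        if not value:
            if key in protocol_map:
                del protocol_map[key]
        else:
            protocol_map[key] = value

update_mapping({'Browser': ['cookie_auth'], 'WebDAV': ['http_basic']})
update_mapping({'WebDAV': []})                   # back to "(any)" for WebDAV
assert list(protocol_map.keys()) == ['Browser']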
def _constructAnnotatedHistory(self, max=10): """Reconstruct historical revisions of archetypes objects Merges revisions to self with revisions to archetypes-related items in __annotations__. Yields at most max recent revisions. """ # All relevant historical states by transaction id # For every tid, keep a dict with object revisions, keyed on annotation # id, or None for self and '__annotations__' for the ann OOBTree # Initialize with self revisions history = dict([(tid, {None: rev}) for (tid, rev) in _objectRevisions(self, max)]) if not getattr(self, '__annotations__', None): # No annotations, just return the history we have for self # Note that if this object had __annotations__ in a past # transaction they will be ignored! Working around this is a # YAGNI I think though. for tid in sorted(history.keys()): yield history[tid][None] return # Now find all __annotation__ revisions, and the annotation keys # used in those. annotation_key_objects = {} isatkey = lambda k, aak=AT_ANN_KEYS: filter(k.startswith, aak) # Loop over max revisions of the __annotations__ object to retrieve # all keys (and more importantly, their objects so we can get revisions) for tid, rev in _objectRevisions(self.__annotations__, max): history.setdefault(tid, {})['__annotations__'] = rev revision = rev['object'] for key in itertools.ifilter(isatkey, revision.iterkeys()): if not hasattr(revision[key], '_p_jar'): continue # Not persistent if key not in annotation_key_objects: annotation_key_objects[key] = revision[key] # For all annotation keys, get their revisions for key, obj in annotation_key_objects.iteritems(): for tid, rev in _objectRevisions(obj, max): history.setdefault(tid, {})[key] = rev del annotation_key_objects # Now we merge the annotation and object revisions into one for each # transaction id, and yield the results tids = sorted(history.iterkeys(), reverse=True) def find_revision(tids, key): """First revision of key in a series of tids""" has_revision = lambda t, h=history, k=key: k in h[t] next_tid = itertools.ifilter(has_revision, tids).next() return history[next_tid][key] for i, tid in enumerate(tids[:max]): revision = find_revision(tids[i:], None) obj = revision['object'] # Track size to maintain correct metadata size = revision['size'] anns_rev = find_revision(tids[i:], '__annotations__') size += anns_rev['size'] anns = anns_rev['object'] # We use a temporary OOBTree to avoid _p_jar complaints from the # transaction machinery tempbtree = OOBTree() tempbtree.__setstate__(anns.__getstate__()) # Find annotation revisions and insert for key in itertools.ifilter(isatkey, tempbtree.iterkeys()): if not hasattr(tempbtree[key], '_p_jar'): continue # Not persistent value_rev = find_revision(tids[i:], key) size += value_rev['size'] tempbtree[key] = value_rev['object'] # Now transfer the tembtree state over to anns, effectively # bypassing the transaction registry while maintaining BTree # integrity anns.__setstate__(tempbtree.__getstate__()) anns._p_changed = 0 del tempbtree # Do a similar hack to set anns on the main object state = obj.__getstate__() state['__annotations__'] = anns obj.__setstate__(state) obj._p_changed = 0 # Update revision metadata if needed if revision['tid'] != tid: # any other revision will do; only size and object are unique revision = history[tid].values()[0].copy() revision['object'] = obj # Correct size based on merged records revision['size'] = size # clean up as we go del history[tid] yield revision
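# Hypothetical sketch of the find_revision() idea above: given revisions bucketed
# by transaction id, walk the remaining (newest-first) tids and take the first one
# that recorded a revision of the requested key. The sample data is made up.
history = {
    30: {None: 'self@30'},
    20: {'__annotations__': 'anns@20'},
    10: {None: 'self@10', '__annotations__': 'anns@10'},
}
tids = sorted(history, reverse=True)             # [30, 20, 10]

def find_revision(tids, key):
    return next(history[t][key] for t in tids if key in history[t])

# Merging the state "as of" tid 20: only tid 20 and older are considered, so the
# object itself comes from tid 10 while the annotations come from tid 20.
assert find_revision(tids[1:], None) == 'self@10'
assert find_revision(tids[1:], '__annotations__') == 'anns@20'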
def __init__(self): # {token: user id, ...} self._token2uid = OOBTree() # {user id: token, ...} self._uid2token = OOBTree() return
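# Sketch of keeping the two OOBTrees above as mirror images of each other; the
# remember() helper and the token values are illustrative, not the utility's API.
from BTrees.OOBTree import OOBTree

token2uid, uid2token = OOBTree(), OOBTree()

def remember(token, uid):
    old = uid2token.get(uid)
    if old is not None and old in token2uid:
        del token2uid[old]           # drop the previous token for this user
    token2uid[token] = uid
    uid2token[uid] = token

remember('tok-1', 'alice')
remember('tok-2', 'alice')
assert 'tok-1' not in token2uid
assert token2uid['tok-2'] == 'alice' and uid2token['alice'] == 'tok-2'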
class DataBucketStream(Document): """ Represents data stored in many small files inside a "stream". Each file is "addressed" by its key similar to dict. """ meta_type = 'ERP5 Data Bucket Stream' portal_type = 'Data Bucket Stream' add_permission = Permissions.AddPortalContent # Declarative security security = ClassSecurityInfo() security.declareObjectProtected(Permissions.AccessContentsInformation) # Declarative properties property_sheets = (PropertySheet.CategoryCore, PropertySheet.SortIndex) def __init__(self, id, **kw): self.initBucketTree() self.initIndexTree() Document.__init__(self, id, **kw) def __len__(self): return len(self._tree) def initBucketTree(self): """ Initialize the Bucket Tree """ self._tree = OOBTree() def initIndexTree(self): """ Initialize the Index Tree """ self._long_index_tree = LOBTree() def getMaxKey(self, key=None): """ Return the maximum key """ try: return self._tree.maxKey(key) except ValueError: return None def getMaxIndex(self, index=None): """ Return the maximum index """ try: return self._long_index_tree.maxKey(index) except ValueError: return None def getMinKey(self, key=None): """ Return the minimum key """ try: return self._tree.minKey(key) except ValueError: return None def getMinIndex(self, index=None): """ Return the minimum key """ try: return self._long_index_tree.minKey(index) except ValueError: return None def _getOb(self, id, *args, **kw): return None def getBucketByKey(self, key=None): """ Get one bucket """ return self._tree[key].value def getBucketByIndex(self, index=None): """ Get one bucket """ key = self._long_index_tree[index] return self.getBucketByKey(key).value def getBucket(self, key): log('DeprecationWarning: Please use getBucketByKey') return self.getBucketByKey(key) def hasBucketKey(self, key): """ Wether bucket with such key exists """ return key in self._tree def hasBucketIndex(self, index): """ Wether bucket with such index exists """ return self._long_index_tree.has_key(index) def insertBucket(self, key, value): """ Insert one bucket """ try: count = self._long_index_tree.maxKey() + 1 except ValueError: count = 0 except AttributeError: pass try: self._long_index_tree.insert(count, key) except AttributeError: pass value = PersistentString(value) is_new_key = self._tree.insert(key, value) if not is_new_key: self.log("Reingestion of same key") self._tree[key] = value def getBucketKeySequenceByKey(self, start_key=None, stop_key=None, count=None, exclude_start_key=False, exclude_stop_key=False): """ Get a lazy sequence of bucket keys """ sequence = self._tree.keys(min=start_key, max=stop_key, excludemin=exclude_start_key, excludemax=exclude_stop_key) if count is None: return sequence return sequence[:count] def getBucketKeySequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket keys """ sequence = self._long_index_tree.values(min=start_index, max=stop_index, excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is None: return sequence return sequence[:count] def getBucketKeySequence(self, start_key=None, count=None): log('DeprecationWarning: Please use getBucketKeySequenceByKey') return self.getBucketKeySequenceByKey(start_key=start_key, count=count) def getBucketIndexKeySequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket keys """ sequence = self._long_index_tree.items(min=start_index, max=stop_index, 
excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is not None: sequence = sequence[:count] return IndexKeySequence(self, sequence) def getBucketIndexSequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket keys """ sequence = self._long_index_tree.keys(min=start_index, max=stop_index, excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is None: return sequence return sequence[:count] def getBucketValueSequenceByKey(self, start_key=None, stop_key=None, count=None, exclude_start_key=False, exclude_stop_key=False): """ Get a lazy sequence of bucket values """ sequence = self._tree.values(min=start_key, max=stop_key, excludemin=exclude_start_key, excludemax=exclude_stop_key) if count is None: return sequence return sequence[:count] def getBucketValueSequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket values """ sequence = self._long_index_tree.values(min=start_index, max=stop_index, excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is not None: sequence = sequence[:count] return IndexValueSequence(self, sequence) def getBucketValueSequence(self, start_key=None, count=None): log('DeprecationWarning: Please use getBucketValueSequenceByKey') return self.getBucketValueSequenceByKey(start_key=start_key, count=count) def getBucketKeyItemSequenceByKey(self, start_key=None, stop_key=None, count=None, exclude_start_key=False, exclude_stop_key=False): """ Get a lazy sequence of bucket items """ sequence = self._tree.items(min=start_key, max=stop_key, excludemin=exclude_start_key, excludemax=exclude_stop_key) if count is None: return sequence return sequence[:count] def getBucketItemSequence(self, start_key=None, count=None, exclude_start_key=False): log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey') return self.getBucketKeyItemSequenceByKey( start_key=start_key, count=count, exclude_start_key=exclude_start_key) def getBucketIndexItemSequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket items """ sequence = self._long_index_tree.items(min=start_index, max=stop_index, excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is not None: sequence = sequence[:count] return IndexItemSequence(self, sequence) def getBucketIndexKeyItemSequenceByIndex(self, start_index=None, stop_index=None, count=None, exclude_start_index=False, exclude_stop_index=False): """ Get a lazy sequence of bucket items """ sequence = self._long_index_tree.items(min=start_index, max=stop_index, excludemin=exclude_start_index, excludemax=exclude_stop_index) if count is not None: sequence = sequence[:count] return IndexKeyItemSequence(self, sequence) def getItemList(self): """ Return a list of all key, value pairs """ return [item for item in self._tree.items()] def getKeyList(self): """ Return a list of all keys """ return [key for key in self._tree.keys()] def getIndexList(self): """ Return a list of all indexes """ return [key for key in self._long_index_tree.keys()] def getIndexKeyTupleList(self): """ Return a list of all indexes """ return [key for key in self._long_index_tree.items()] def getMd5sum(self, key): """ Get hexdigest of bucket. 
""" h = hashlib.md5() h.update(self.getBucketByKey(key)) return h.hexdigest() def delBucketByKey(self, key): """ Remove the bucket. """ del self._tree[key] for index, my_key in list(self.getBucketIndexKeySequenceByIndex()): if my_key == key: del self._long_index_tree[index] def delBucketByIndex(self, index): """ Remove the bucket. """ key = self._long_index_tree[index] del self._tree[key] del self._long_index_tree[index] def rebuildIndexTreeByKeyOrder(self): """ Clear and rebuild the index tree by order of keys """ self.initIndexTree() for count, key in enumerate(self.getBucketKeySequenceByKey()): self._long_index_tree.insert(count, key)
class ZODBGroupManager( BasePlugin ): """ PAS plugin for managing groups, and groups of groups in the ZODB """ meta_type = 'ZODB Group Manager' security = ClassSecurityInfo() def __init__(self, id, title=None): self._id = self.id = id self.title = title self._groups = OOBTree() self._principal_groups = OOBTree() # # IGroupEnumerationPlugin implementation # security.declarePrivate( 'enumerateGroups' ) def enumerateGroups( self , id=None , title=None , exact_match=False , sort_by=None , max_results=None , **kw ): """ See IGroupEnumerationPlugin. """ group_info = [] group_ids = [] plugin_id = self.getId() if isinstance( id, basestring ): id = [ id ] if isinstance( title, basestring ): title = [ title ] if exact_match and ( id or title ): if id: group_ids.extend( id ) elif title: group_ids.extend( title ) if group_ids: group_filter = None else: # Searching group_ids = self.listGroupIds() group_filter = _ZODBGroupFilter( id, title, **kw ) for group_id in group_ids: if self._groups.get( group_id, None ): e_url = '%s/manage_groups' % self.getId() p_qs = 'group_id=%s' % group_id m_qs = 'group_id=%s&assign=1' % group_id info = {} info.update( self._groups[ group_id ] ) info[ 'pluginid' ] = plugin_id info[ 'properties_url' ] = '%s?%s' % ( e_url, p_qs ) info[ 'members_url' ] = '%s?%s' % ( e_url, m_qs ) info[ 'id' ] = '%s%s' % (self.prefix, info['id']) if not group_filter or group_filter( info ): group_info.append( info ) return tuple( group_info ) # # IGroupsPlugin implementation # security.declarePrivate( 'getGroupsForPrincipal' ) def getGroupsForPrincipal( self, principal, request=None ): """ See IGroupsPlugin. """ unadorned = self._principal_groups.get( principal.getId(), () ) return tuple(['%s%s' % (self.prefix, x) for x in unadorned]) # # (notional)IZODBGroupManager interface # security.declareProtected( ManageGroups, 'listGroupIds' ) def listGroupIds( self ): """ -> ( group_id_1, ... group_id_n ) """ return self._groups.keys() security.declareProtected( ManageGroups, 'listGroupInfo' ) def listGroupInfo( self ): """ -> ( {}, ...{} ) o Return one mapping per group, with the following keys: - 'id' """ return self._groups.values() security.declareProtected( ManageGroups, 'getGroupInfo' ) def getGroupInfo( self, group_id ): """ group_id -> {} """ return self._groups[ group_id ] security.declarePrivate( 'addGroup' ) def addGroup( self, group_id, title=None, description=None ): """ Add 'group_id' to the list of groups managed by this object. o Raise KeyError on duplicate. """ if self._groups.get( group_id ) is not None: raise KeyError, 'Duplicate group ID: %s' % group_id self._groups[ group_id ] = { 'id' : group_id , 'title' : title , 'description' : description } security.declarePrivate( 'updateGroup' ) def updateGroup( self, group_id, title=None, description=None ): """ Update properties for 'group_id' o Raise KeyError if group_id doesn't already exist. """ if title is not None: self._groups[group_id]['title'] = title if description is not None: self._groups[group_id]['description'] = description self._groups[group_id] = self._groups[group_id] security.declarePrivate( 'removeGroup' ) def removeGroup( self, group_id ): """ Remove 'role_id' from the list of roles managed by this object, removing assigned members from it before doing so. o Raise KeyError if 'group_id' doesn't already exist. 
""" for principal_id in self._principal_groups.keys(): self.removePrincipalFromGroup( principal_id, group_id ) del self._groups[ group_id ] # # Group assignment API # security.declareProtected( ManageGroups, 'listAvailablePrincipals' ) def listAvailablePrincipals( self, group_id, search_id ): """ Return a list of principal IDs to that can belong to the group. o If supplied, 'search_id' constrains the principal IDs; if not, return empty list. o Omit principals with existing assignments. """ result = [] if search_id: # don't bother searching if no criteria parent = aq_parent( self ) for info in parent.searchPrincipals( max_results=20 , sort_by='id' , id=search_id , exact_match=False ): id = info[ 'id' ] title = info.get( 'title', id ) if ( group_id not in self._principal_groups.get( id, () ) and group_id != id ): result.append( ( id, title ) ) return result security.declareProtected( ManageGroups, 'listAssignedPrincipals' ) def listAssignedPrincipals( self, group_id ): """ Return a list of principal IDs belonging to a group. """ result = [] for k, v in self._principal_groups.items(): if group_id in v: parent = aq_parent( self ) info = parent.searchPrincipals( id=k, exact_match=True ) if len( info ) == 0: title = '<%s: not found>' % k else: # always use the title of the first principal found title = info[0].get( 'title', k ) result.append( ( k, title ) ) return result security.declareProtected( ManageGroups, 'addPrincipalToGroup' ) def addPrincipalToGroup( self, principal_id, group_id, REQUEST=None ): """ Add a principal to a group. o Return a boolean indicating whether a new assignment was created. o Raise KeyError if 'group_id' is unknown. """ group_info = self._groups[ group_id ] # raise KeyError if unknown! current = self._principal_groups.get( principal_id, () ) already = group_id in current if not already: new = current + ( group_id, ) self._principal_groups[ principal_id ] = new self._invalidatePrincipalCache( principal_id ) return not already addPrincipalToGroup = postonly(addPrincipalToGroup) security.declareProtected( ManageGroups, 'removePrincipalFromGroup' ) def removePrincipalFromGroup( self, principal_id, group_id, REQUEST=None ): """ Remove a prinicpal from from a group. o Return a boolean indicating whether the principal was already a member of the group. o Raise KeyError if 'group_id' is unknown. o Ignore requests to remove a principal if not already a member of the group. """ group_info = self._groups[ group_id ] # raise KeyError if unknown! 
current = self._principal_groups.get( principal_id, () ) new = tuple( [ x for x in current if x != group_id ] ) already = current != new if already: self._principal_groups[ principal_id ] = new self._invalidatePrincipalCache( principal_id ) return already removePrincipalFromGroup = postonly(removePrincipalFromGroup) # # ZMI # manage_options = ( ( { 'label': 'Groups', 'action': 'manage_groups', } , ) + BasePlugin.manage_options ) security.declarePublic( 'manage_widgets' ) manage_widgets = PageTemplateFile( 'www/zuWidgets' , globals() , __name__='manage_widgets' ) security.declareProtected( ManageGroups, 'manage_groups' ) manage_groups = PageTemplateFile( 'www/zgGroups' , globals() , __name__='manage_groups' ) security.declareProtected( ManageGroups, 'manage_twoLists' ) manage_twoLists = PageTemplateFile( '../www/two_lists' , globals() , __name__='manage_twoLists' ) security.declareProtected( ManageGroups, 'manage_addGroup' ) def manage_addGroup( self , group_id , title=None , description=None , RESPONSE=None ): """ Add a group via the ZMI. """ self.addGroup( group_id, title, description ) message = 'Group+added' if RESPONSE is not None: RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s' % ( self.absolute_url(), message ) ) security.declareProtected( ManageGroups, 'manage_updateGroup' ) def manage_updateGroup( self , group_id , title , description , RESPONSE=None ): """ Update a group via the ZMI. """ self.updateGroup( group_id, title, description ) message = 'Group+updated' if RESPONSE is not None: RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s' % ( self.absolute_url(), message ) ) security.declareProtected( ManageGroups, 'manage_removeGroups' ) def manage_removeGroups( self , group_ids , RESPONSE=None , REQUEST=None ): """ Remove one or more groups via the ZMI. """ group_ids = filter( None, group_ids ) if not group_ids: message = 'no+groups+selected' else: for group_id in group_ids: self.removeGroup( group_id ) message = 'Groups+removed' if RESPONSE is not None: RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s' % ( self.absolute_url(), message ) ) manage_removeGroups = postonly(manage_removeGroups) security.declareProtected( ManageGroups, 'manage_addPrincipalsToGroup' ) def manage_addPrincipalsToGroup( self , group_id , principal_ids , RESPONSE=None , REQUEST=None ): """ Add one or more principals to a group via the ZMI. """ assigned = [] for principal_id in principal_ids: if self.addPrincipalToGroup( principal_id, group_id ): assigned.append( principal_id ) if not assigned: message = 'Principals+already+members+of+%s' % group_id else: message = '%s+added+to+%s' % ( '+'.join( assigned ) , group_id ) if RESPONSE is not None: RESPONSE.redirect( ( '%s/manage_groups?group_id=%s&assign=1' + '&manage_tabs_message=%s' ) % ( self.absolute_url(), group_id, message ) ) manage_addPrincipalsToGroup = postonly(manage_addPrincipalsToGroup) security.declareProtected( ManageGroups , 'manage_removePrincipalsFromGroup' ) def manage_removePrincipalsFromGroup( self , group_id , principal_ids , RESPONSE=None , REQUEST=None ): """ Remove one or more principals from a group via the ZMI. 
""" removed = [] for principal_id in principal_ids: if self.removePrincipalFromGroup( principal_id, group_id ): removed.append( principal_id ) if not removed: message = 'Principals+not+in+group+%s' % group_id else: message = 'Principals+%s+removed+from+%s' % ( '+'.join( removed ) , group_id ) if RESPONSE is not None: RESPONSE.redirect( ( '%s/manage_groups?group_id=%s&assign=1' + '&manage_tabs_message=%s' ) % ( self.absolute_url(), group_id, message ) ) manage_removePrincipalsFromGroup = postonly(manage_removePrincipalsFromGroup)
def __init__(self): super().__init__() self._btree = BTree() self.build()
def __init__(self, id, title=None, mapping=None): self._id = self.id = id self.title = title self._map = OOBTree() if mapping is not None: self.manage_updateProtocolMapping(mapping=mapping)
class InvertedIndex: def __init__(self): super().__init__() self._btree = BTree() self.build() def build(self): """ Reads files one-by-one and builds the index Arguments: None Returns: None """ if os.path.isfile("InvertedIndex"): print("Loading the Inverted Index from file") with open("InvertedIndex", "rb") as file: self._btree = pickle.load(file) else: print("Building the Inverted Index") dirPath = os.path.dirname(os.path.realpath(__file__)) dataPath = os.path.realpath(os.path.join(dirPath, "..", "data")) files = [ os.path.join(dataPath, file) for file in sorted(os.listdir(dataPath)) ] for file in files: snippets = getSnippets(file) for index, snippet in enumerate(snippets): filename = int(os.path.split(file)[1].split(".csv")[0]) docId = (filename, index + 2) tokens, snippetMetadata = preProcess(snippet) self.updateIndex(tokens, docId) self.sortPostingLists() sys.setrecursionlimit(10000) with open("InvertedIndex", "wb") as file: print("Saving the Inverted Index to file") pickle.dump(self._btree, file) def sortPostingLists(self): words = list(self.getKeys()) for word in words: postingList = self._btree.get(word) postingList.sort(key=lambda x: ((10000 * x[0][0]) + x[0][1])) def getKeys(self): return self._btree.keys() def getValues(self): return self._btree.values() def getPostingListCollection(self, termList): result = [] for term in termList: postingList = self._btree.get(term) if (postingList != None): result.append(postingList) else: result.append([]) return result def documentUnion(self, postingListCollection): docList = set() for postingList in postingListCollection: for doc in postingList: docList.add(doc[0]) return docList def getDocuments(self, termList, queryType=0, queryMetadata=defaultdict(int)): """ Finds the documents containing the terms Arguments: termList - List of query terms Returns: List of document numbers """ docList = [] # Union if (queryType == 0): postingListCollection = self.getPostingListCollection(termList) docList = self.documentUnion(postingListCollection) # intersection/positional elif (queryType == 1): postingListCollection = self.getPostingListCollection(termList) docList = self.documentIntersection(postingListCollection, queryMetadata) return docList def documentIntersection(self, documents, queryMetadata): while (len(documents) > 1): isPositional = False diff = queryMetadata[(len(documents) - 2, len(documents) - 1)] if (diff != 0): isPositional = True list1 = documents.pop() list2 = documents.pop() ptr1 = 0 ptr2 = 0 intersection = [] while (ptr1 < len(list1) and ptr2 < len(list2)): fileNo1 = list1[ptr1][0][0] rowNo1 = list1[ptr1][0][1] fileNo2 = list2[ptr2][0][0] rowNo2 = list2[ptr2][0][1] if (fileNo1 == fileNo2 and rowNo1 == rowNo2): if (isPositional): for postion1 in list1[ptr1][1]: for postion2 in list2[ptr2][1]: if ((postion1 - postion2 + 1) == diff): if (len(intersection) == 0 or intersection[-1][0] != list1[ptr1][0]): intersection.append( (list1[ptr1][0], [postion2])) elif (intersection[-1][0] == list1[ptr1][0] ): intersection[-1][1].append(postion2) else: intersection.append((list1[ptr1][0], list2[ptr2][1])) ptr1 += 1 ptr2 += 1 elif (fileNo1 == fileNo2 and rowNo1 < rowNo2): ptr1 += 1 elif (fileNo1 == fileNo2 and rowNo1 > rowNo2): ptr2 += 1 elif (fileNo1 < fileNo2): ptr1 += 1 elif (fileNo1 > fileNo2): ptr2 += 1 documents.append(intersection) docNolist = [x[0] for x in documents[0]] return docNolist def updateIndex(self, docTokens, docId): """ Updates the inverted index Arguments: docText - Document to be added to the index docId - ID of the document 
Returns: None """ for word, wordIndex in docTokens: postingList = self._btree.get(word) if postingList is not None: lastdocId = postingList[-1][0] if docId == lastdocId: postingList[-1][1].append(wordIndex) else: postingList.append((docId, [wordIndex])) else: postingList = [(docId, [wordIndex])] self._btree.insert(word, postingList)
class PathIndex(Persistent, SimpleItem): """Index for paths returned by getPhysicalPath. A path index stores all path components of the physical path of an object. Internal datastructure: - a physical path of an object is split into its components - every component is kept as a key of a OOBTree in self._indexes - the value is a mapping 'level of the path component' to 'all docids with this path component on this level' """ implements(IPathIndex, IUniqueValueIndex, ISortIndex) meta_type = "PathIndex" query_options = ('query', 'level', 'operator') manage_options = ({'label': 'Settings', 'action': 'manage_main'}, ) def __init__(self, id, caller=None): self.id = id self.operators = ('or', 'and') self.useOperator = 'or' self.clear() def __len__(self): return self._length() # IPluggableIndex implementation def getEntryForObject(self, docid, default=None): """ See IPluggableIndex. """ try: return self._unindex[docid] except KeyError: return default def getIndexSourceNames(self): """ See IPluggableIndex. """ return ( self.id, 'getPhysicalPath', ) def index_object(self, docid, obj, threshold=100): """ See IPluggableIndex. """ f = getattr(obj, self.id, None) if f is not None: if safe_callable(f): try: path = f() except AttributeError: return 0 else: path = f if not isinstance(path, (str, tuple)): raise TypeError( 'path value must be string or tuple of strings') else: try: path = obj.getPhysicalPath() except AttributeError: return 0 if isinstance(path, (list, tuple)): path = '/' + '/'.join(path[1:]) comps = filter(None, path.split('/')) old_value = self._unindex.get(docid, None) if old_value == path: return 0 if old_value is None: self._length.change(1) for i in range(len(comps)): self.insertEntry(comps[i], docid, i) self._unindex[docid] = path return 1 def unindex_object(self, docid): """ See IPluggableIndex. """ if docid not in self._unindex: LOG.debug('Attempt to unindex nonexistent document with id %s' % docid) return comps = self._unindex[docid].split('/') for level in range(len(comps[1:])): comp = comps[level + 1] try: self._index[comp][level].remove(docid) if not self._index[comp][level]: del self._index[comp][level] if not self._index[comp]: del self._index[comp] except KeyError: LOG.debug('Attempt to unindex document with id %s failed' % docid) self._length.change(-1) del self._unindex[docid] def _apply_index(self, request): """ See IPluggableIndex. o Unpacks args from catalog and mapps onto '_search'. """ record = parseIndexRequest(request, self.id, self.query_options) if record.keys is None: return None level = record.get("level", 0) operator = record.get('operator', self.useOperator).lower() # depending on the operator we use intersection of union if operator == "or": set_func = union else: set_func = intersection res = None for k in record.keys: rows = self._search(k, level) res = set_func(res, rows) if res: return res, (self.id, ) else: return IISet(), (self.id, ) def numObjects(self): """ See IPluggableIndex. """ return len(self._unindex) def indexSize(self): """ See IPluggableIndex. """ return len(self) def clear(self): """ See IPluggableIndex. """ self._depth = 0 self._index = OOBTree() self._unindex = IOBTree() self._length = Length(0) # IUniqueValueIndex implementation def hasUniqueValuesFor(self, name): """ See IUniqueValueIndex. """ return name == self.id def uniqueValues(self, name=None, withLength=0): """ See IUniqueValueIndex. 
""" if name in (None, self.id, 'getPhysicalPath'): if withLength: for key in self._index: yield key, len(self._search(key, -1)) else: for key in self._index.keys(): yield key # ISortIndex implementation def keyForDocument(self, documentId): """ See ISortIndex. """ return self._unindex.get(documentId) def documentToKeyMap(self): """ See ISortIndex. """ return self._unindex # IPathIndex implementation. def insertEntry(self, comp, id, level): """ See IPathIndex """ if not self._index.has_key(comp): self._index[comp] = IOBTree() if not self._index[comp].has_key(level): self._index[comp][level] = IITreeSet() self._index[comp][level].insert(id) if level > self._depth: self._depth = level # Helper methods def _search(self, path, default_level=0): """ Perform the actual search. ``path`` a string representing a relative URL, or a part of a relative URL, or a tuple ``(path, level)``. In the first two cases, use ``default_level`` as the level for the search. ``default_level`` the level to use for non-tuple queries. ``level >= 0`` => match ``path`` only at the given level. ``level < 0`` => match ``path`` at *any* level """ if isinstance(path, str): level = default_level else: level = int(path[1]) path = path[0] if level < 0: # Search at every level, return the union of all results return multiunion([ self._search(path, level) for level in xrange(self._depth + 1) ]) comps = filter(None, path.split('/')) if level + len(comps) - 1 > self._depth: # Our search is for a path longer than anything in the index return IISet() if len(comps) == 0: return IISet(self._unindex.keys()) results = None for i, comp in reversed(list(enumerate(comps))): if not self._index.get(comp, {}).has_key(level + i): return IISet() results = intersection(results, self._index[comp][level + i]) return results manage = manage_main = DTMLFile('dtml/managePathIndex', globals()) manage_main._setName('manage_main')
# @app.before_request # def set_db_defaults(): # if 'userdb' not in db: # db['userdb'] = List() # @app.route('/logout') # def logout(): # session.pop('logged_in', None) # flash('You were logged out') # return redirect(url_for('show_entries')) #Ensure that a 'userdb' key is present #in the root if not dbroot.has_key('userdb'): from BTrees.OOBTree import OOBTree dbroot['userdb'] = OOBTree() # userdb is a <BTrees.OOBTree.OOBTree object at some location> userdb = dbroot['userdb'] print "user db init:", userdb, type(userdb) @app.route('/index', methods=['GET', 'POST']) def index(): form = LoginForm(request.form) if 'username' in session: return 'Logged in as %s' % escape(session['username']) # return render_template('forms/register.html', form = form) return 'You are not logged in' @app.route('/', methods=['GET', 'POST'])
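# Hedged sketch of writing into the userdb OOBTree shown above; dbroot comes from
# the snippet, transaction is the standard ZODB transaction manager, and the
# record layout plus register_user() itself are illustrative.
import transaction

def register_user(username, password_hash):
    userdb = dbroot['userdb']
    if username in userdb:
        return False
    # Plain dict values are stored as-is; to edit them later, reassign the value
    # (or use persistent.mapping.PersistentMapping) so ZODB notices the change.
    userdb[username] = {'password': password_hash}
    transaction.commit()
    return True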
def __init__(self, id, title=None): self._id = self.id = id self.title = title self._groups = OOBTree() self._principal_groups = OOBTree()
def _migrateVersionPolicies(self): if not isinstance(self._policy_defs, OOBTree): btree_defs = OOBTree() for obj_id, title in self._policy_defs.items(): btree_defs[obj_id]=VersionPolicy(obj_id, title) self._policy_defs = btree_defs
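# General sketch of the same migration idea: copy a plain-dict attribute into an
# OOBTree so a large mapping is no longer loaded and written as a single pickle.
# The policy id and title used here are illustrative.
from BTrees.OOBTree import OOBTree

def migrate_to_btree(mapping):
    if isinstance(mapping, OOBTree):
        return mapping                   # already migrated, nothing to do
    btree = OOBTree()
    for key, value in mapping.items():
        btree[key] = value
    return btree

policies = migrate_to_btree({'at_edit_autoversion': 'Create version on edit'})
assert isinstance(policies, OOBTree)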
def get_favorites(self): if FAVORITE_KEY not in self.annotations: self.annotations[FAVORITE_KEY] = OOBTree() return self.annotations[FAVORITE_KEY]
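# Sketch of the lazy-initialisation pattern above using IAnnotations from
# zope.annotation; the annotation key is a placeholder for the real FAVORITE_KEY.
from BTrees.OOBTree import OOBTree
from zope.annotation.interfaces import IAnnotations

FAVORITE_KEY = 'my.package.favorites'    # placeholder dotted name

def get_favorites(context):
    annotations = IAnnotations(context)
    if FAVORITE_KEY not in annotations:
        # An OOBTree keeps each favourite in its own persistent bucket instead
        # of rewriting one big dict value on every change.
        annotations[FAVORITE_KEY] = OOBTree()
    return annotations[FAVORITE_KEY]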