Example #1
class RecentActivityUtility(object):
    """ Utility for recent activities """
    grok.implements(IRecentActivity)

    def __init__(self):
        self.activities = OOBTree()

    def add_activity(self, timestamp, action, user, obj, parent):
        """ Add an activity to the BTree storage """
        timestamp = int(time.time())
        activity = {'action': action,
                    'user': user,
                    'object': obj,
                    'object_url': obj.absolute_url(),
                    'parent': parent,
                    'parent_url': parent.absolute_url(),
                    }
        self.activities.insert(timestamp, activity)
        return timestamp

    def get_recent_activity(self, items=None):
        """ Get the activities stored in the BTree """
        if self.activities:
            if items:
                return sorted(self.activities.items(), reverse=True)[:items]
            else:
                return sorted(self.activities.items(), reverse=True)
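
The utility above keys the OOBTree by int(time.time()) and sorts the items in reverse to read the newest entries first. A minimal sketch of that pattern on its own, with purely illustrative data (not taken from the project above):

import time
from BTrees.OOBTree import OOBTree

activities = OOBTree()
activities.insert(int(time.time()), {'action': 'edit', 'user': 'alice'})

# Integer keys compare naturally, so sorting the items in reverse
# yields the most recent activity first.
latest = sorted(activities.items(), reverse=True)[:10]

Note that OOBTree.insert() only adds a key that is not already present, so two activities recorded in the same second keep only the first; the utility above shares that behavior.
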
Example #2
class PersitentOOBTree(Persistent):
    """A persitent wrapper around a OOBTree"""

    def __init__(self):
        self._data = OOBTree()
        Persistent.__init__(self)
        self.__len = Length()

    @Lazy
    def _PersitentOOBTree__len(self):
        l = Length()
        ol = len(self._data)
        if ol > 0:
            l.change(ol)
        self._p_changed = True
        return l

    def __len__(self):
        return self.__len()

    def __setitem__(self, key, value):
        # make sure our lazy property gets set
        l = self.__len
        self._data[key] = value
        l.change(1)

    def __delitem__(self, key):
        # make sure our lazy property gets set
        l = self.__len
        del self._data[key]
        l.change(-1)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        """See interface `IReadContainer`.
        """
        return self._data[key]

    def get(self, key, default=None):
        """See interface `IReadContainer`.
        """
        return self._data.get(key, default)

    def __contains__(self, key):
        """See interface `IReadContainer`.
        """
        return key in self._data

    has_key = __contains__

    def items(self, key=None):
        return self._data.items(key)

    def keys(self, key=None):
        return self._data.keys(key)

    def values(self, key=None):
        return self._data.values(key)
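
The wrapper pairs the OOBTree with a BTrees.Length.Length counter so that len() does not have to walk the tree and concurrent increments do not conflict. A small sketch of the Length object on its own:

from BTrees.Length import Length

length = Length()      # starts at 0
length.change(1)       # increment
length.change(-1)      # decrement
print(length())        # calling the counter returns its current value
print(length.value)    # the same number is stored on the .value attribute

Length defines _p_resolveConflict, which is why two transactions that both call change() can be merged instead of raising a write conflict.
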
Example #3
class ShortURLStorage(Persistent):
    """Stores short codes and urls to redirect to. """
    implements(IShortURLStorage)

    def __init__(self):
        self._map = OOBTree()

    def add(self, short, target):
        self._map[short] = target

    def remove(self, short):
        if self._map.has_key(short):
            del self._map[short]

    def get(self, short, default=None):
        return self._map.get(short, default)

    def suggest(self):
        try:
            key = self._map.maxKey()
        except ValueError:
            # If the tree is empty
            return 'AAAAA'
        return _increment(key)

    def __getitem__(self, key):
        return self._map.items()[key]

    def __len__(self):
        return len(self._map)
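
suggest() relies on OOBTree.maxKey(), which raises ValueError on an empty tree, and on an _increment() helper defined elsewhere in that module. The helper is not shown above; a hypothetical stand-in that treats the short code as a base-26 counter could look like this:

import string

def _increment(key):
    """Advance an uppercase short code, e.g. 'AAAAA' -> 'AAAAB'."""
    letters = string.ascii_uppercase
    chars = list(key)
    i = len(chars) - 1
    while i >= 0:
        pos = letters.index(chars[i])
        if pos + 1 < len(letters):
            chars[i] = letters[pos + 1]
            return ''.join(chars)
        chars[i] = letters[0]           # carry, e.g. 'AAAZ' -> 'AABA'
        i -= 1
    return letters[0] + ''.join(chars)  # all Z's: grow by one character
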
Example #4
class RecentActivityUtility(Persistent):
    """Recent Activity Utility
    """
    implements(IRecentActivityUtility)

    #activities = IOBTree()
    activities = None

    def __init__(self):
        self.activities = OOBTree()

    def addActivity(self, timestamp, action, user, fullname, object, parent):
        """Add an activity to the BTree.
        """

        timestamp = int(time.time())
        activity = {'action': action,
                    'user': user,
                    'fullname': fullname,
                    'object': object,
                    'object_url': object.absolute_url(),
                    'parent': parent,
                    'parent_url': parent.absolute_url(),
                    }
        self.activities.insert(timestamp, activity)

        #from zope.container.contained import ObjectAddedEvent
        #from zope.container.contained import notifyContainerModified
        #notify(ObjectAddedEvent(object, self.activities, str(uid)))
        #notifyContainerModified(self.activities)
        return timestamp

    def getRecentActivity(self, items=None):
        """Get all activities stored in the BTree.
        """
        if self.activities:
            if items:
                # Return activities sorted by timestamp
                return sorted(self.activities.items(), reverse=True)[:items]
            else:
                return sorted(self.activities.items(), reverse=True)

    def manage_fixupOwnershipAfterAdd(self):
        """This is needed, otherwise we get an Attribute Error
           when we try to install the product.
        """
        pass
Example #5
class CategoryIndex(Persistent):
    
    def __init__( self ):
        self._idxCategItem = OOBTree()
        
    def dump(self):
        return list(self._idxCategItem.items())
        
    def _indexConfById(self, categid, confid):
        # only the more restrictive setup is taken into account
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            res = self._idxCategItem[categid]
        else:
            res = []
        res.append(confid)
        self._idxCategItem[categid] = res
        
    def unindexConf(self, conf):
        confid = str(conf.getId())
        self.unindexConfById(confid)
    
    def unindexConfById(self, confid):
        for categid in self._idxCategItem.keys():
            if confid in self._idxCategItem[categid]:
                res = self._idxCategItem[categid]
                res.remove(confid)
                self._idxCategItem[categid] = res
        
    def reindexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.reindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.reindexConf(conf)
        
    def reindexConf(self, conf):
        self.unindexConf(conf)
        self.indexConf(conf)

    def indexConf(self, conf):
        categs = conf.getOwnerPath()
        level = 0 
        for categ in conf.getOwnerPath():
            if conf.getFullVisibility() > level:
                self._indexConfById(categ.getId(),conf.getId())
            level+=1
        if conf.getFullVisibility() > level:
            self._indexConfById("0",conf.getId())
    
    def getItems(self, categid):
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            return self._idxCategItem[categid]
        else:
            return []
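
_indexConfById() and unindexConfById() store plain Python lists as BTree values, so they reassign the mutated list to its key; unlike the BTree itself, a plain list does not notify the ZODB when it changes in place. The pattern in isolation:

from BTrees.OOBTree import OOBTree

index = OOBTree()
res = index.get('42', [])
res.append('conf-1')
index['42'] = res   # write the list back so the change is persisted
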
Example #6
class CategoryDateIndex(Persistent):
    
    def __init__( self ):
        self._idxCategItem = OOBTree()
        
    def dump(self):
        return map(lambda idx: (idx[0], idx[1].dump()), list(self._idxCategItem.items()))
    
    def unindexConf(self, conf):
        for owner in conf.getOwnerPath():
            self._idxCategItem[owner.getId()].unindexConf(conf)
        self._idxCategItem['0'].unindexConf(conf)

    def unindexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.unindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.unindexConf(conf)

    def indexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.indexCateg(subcat)
        for conf in categ.getConferenceList():
            self.indexConf(conf)
        
    def _indexConf(self, categid, conf):
        # only the more restrictive setup is taken into account
        if self._idxCategItem.has_key(categid):
            res = self._idxCategItem[categid]
        else:
            res = CalendarIndex()
        res.indexConf(conf)
        self._idxCategItem[categid] = res

    def indexConf(self, conf):
        categs = conf.getOwnerPath()
        level = 0 
        for categ in conf.getOwnerPath():
            self._indexConf(categ.getId(), conf)
        self._indexConf("0",conf)
            
    def getObjectsIn(self, categid, sDate, eDate):
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            return self._idxCategItem[categid].getObjectsIn(sDate, eDate)
        else:
            return []

    def getObjectsStartingIn( self, categid, sDate, eDate):
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            return self._idxCategItem[categid].getObjectsStartingIn(sDate, eDate)
        else:
            return []
Example #7
class Package(Persistent):
    pypi_url = 'http://pypi.python.org/pypi/{}/json'

    def __init__(self, name):
        self.__name__ = name
        self.name = name
        self.releases = OOBTree()

    def __getitem__(self, release_name):
        return self.releases[release_name]

    def __setitem__(self, key, value):
        key = format_key(key)
        self.releases[key] = value
        self.releases[key].__parent__ = self

    @classmethod
    @cache_region('pypi', 'get_last_remote_filename')
    def get_last_remote_version(cls, proxy, package_name):
        logger.debug('Not in cache')
        if not proxy:
            return None
        try:
            result = requests.get('http://pypi.python.org/pypi/{}/json'.format(package_name))
            if not result.status_code == 200:
                return None
            result = json.loads(result.content.decode('utf-8'))
            return result['info']['version']
        except ConnectionError:
            pass
        return None

    def repository_is_up_to_date(self, last_remote_release):
        if not last_remote_release:
            return True
        remote_version = parse_version(last_remote_release)

        local_versions = [release.version for release in self.releases.values()]
        for version in local_versions:
            if parse_version(version) >= remote_version:
                return True
        return False

    @classmethod
    def by_name(cls, name, request):
        root = repository_root_factory(request)
        return root[name] if name in root else None

    def get_last_release(self):
        max_version = max([parse_version(version) for version in self.releases.keys()])
        for version, release in self.releases.items():
            if parse_version(version) == max_version:
                return release
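
get_last_release() compares parse_version() results rather than relying on the BTree's key order, because version strings sort lexicographically in the tree ('1.10' would come before '1.2'). A small sketch of that scan, assuming parse_version comes from pkg_resources (the example's imports are not shown):

from BTrees.OOBTree import OOBTree
from pkg_resources import parse_version

releases = OOBTree()
releases['1.2'] = 'release 1.2'
releases['1.10'] = 'release 1.10'

# maxKey() would answer '1.2' (string order); comparing parsed versions
# correctly picks '1.10' as the newest release.
newest = max(releases.keys(), key=parse_version)
print(releases[newest])
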
Example #8
class PollRecord(BTreeContainer):
    implements(IPollRecord, IContentContainer)

    voteCount = None

    firstVote = None

    lastVote = None

    def __init__(self, *kv, **kw):
        super(PollRecord, self).__init__(*kv, **kw)
        self._results = OOBTree()
        self.voteCount = Length()

    def add(self, record):
        polling = getUtility(IPolling)
        for key, value in record.choices.items():
            item = self._results.get(key)
            if item is None:
                item = QuestionRecord()
                notify(ObjectCreatedEvent(item))
                self._results[key] = item
            for id in value:
                self.voteCount.change(1)
                polling.voteCount.change(1)
                item.voteCount.change(1)
                if item.firstVote is None:
                    item.firstVote = record
                item.lastVote = record
                answer = item.get(id)
                if answer:
                    answer.change(1)
                else:
                    item[id] = Length(1)
        if self.firstVote is None:
            self.firstVote = record
        self.lastVote = record
        self._p_changed = 1

    def getResults(self):
        res = {}
        for question, answers in self._results.items():
            res[question] = {}
            size = float(answers.voteCount.value)
            for answer, votes in answers.items():
                res[question][answer] = (votes.value, votes.value/size)
        return res, self
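
getResults() divides each answer's Length value by the question's vote count to get a share. A reduced sketch of the per-answer counters, with made-up answer ids:

from BTrees.OOBTree import OOBTree
from BTrees.Length import Length

answers = OOBTree()
for answer_id in ['yes', 'no', 'yes', 'yes']:
    counter = answers.get(answer_id)
    if counter is None:
        answers[answer_id] = Length(1)
    else:
        counter.change(1)

total = float(sum(counter.value for counter in answers.values()))
shares = dict((answer, counter.value / total)
              for answer, counter in answers.items())
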
Example #9
 def group(self, seq):
   sortIndex = self._sortIndex; sortReverse = self._sortReverse
   ns = len(seq); ni = len(sortIndex)
   if ns >= 0.1 * ni:
     # result large compared to index -- sort via index
     handled = IISet(); hn = 0
     _load = getattr(sortIndex, '_load', None)
     if _load is None:
       # not an optimized index
       items = sortIndex.items()
       
       _load = lambda (x1, x2): x2
       if sortReverse: items.reverse()
     elif sortReverse:
       gRO = getattr(sortIndex, 'getReverseOrder', None)
       items = gRO and gRO()
       if items is None:
         items = list(sortIndex._index.keys()); items.reverse()
     else: items = sortIndex._index.keys()
     for i in items:
       ids = intersection(seq, _load(i))
       if ids:
         handled.update(ids); hn += len(ids)
         yield i, ids
     if hn != len(seq): yield None, difference(seq, handled)
   else:
     # result relatively small -- sort via result
     m = OOBTree()
     keyFor = getattr(sortIndex, 'keyForDocument', None)
     # work around "nogopip" bug: it defines "keyForDocument" as an integer
     if not callable(keyFor):
       # this will fail, when the index neither defines a reasonable
       # "keyForDocument" nor "documentToKeyMap". In this case,
       # the index cannot be used for sorting.
       keyFor = lambda doc, map=sortIndex.documentToKeyMap(): map[doc]
     noValue = IITreeSet()
     for doc in seq.keys():
       try: k = keyFor(doc)
       except KeyError: noValue.insert(doc); continue
       l = m.get(k)
       if l is None: l = m[k] = IITreeSet()
       l.insert(doc)
     items = m.items()
     if sortReverse: items = list(items); items.reverse()
     for i in items: yield i
     if noValue: yield None, noValue
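
The "sort via result" branch groups document ids under their sort key in an OOBTree whose values are IITreeSet objects, then yields the groups in key order. A stripped-down version of that grouping, with a plain dict standing in for the index's documentToKeyMap():

from BTrees.OOBTree import OOBTree
from BTrees.IIBTree import IITreeSet

doc_to_key = {1: 'b', 2: 'a', 3: 'b'}   # illustrative documentToKeyMap
groups = OOBTree()
for doc, key in doc_to_key.items():
    bucket = groups.get(key)
    if bucket is None:
        bucket = groups[key] = IITreeSet()
    bucket.insert(doc)

for key, docs in groups.items():   # keys come back in sorted order
    print(key, list(docs))         # 'a' [2], then 'b' [1, 3]
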
Example #10
class RegistrationStorage(object):

    attr_name = '_registration_confirmations'

    def __init__(self, context):
        self.context = context
        try:
            self._data = getattr(context, self.attr_name)
        except AttributeError:
            self._data = OOBTree()
            setattr(context, self.attr_name, self._data)

    def add(self, email, data=None):
        self.clean()
        email = email.lower()
        if data is None:
            data = {}
        data.update({
            'created': time(),
            'code': makeRandomCode(100)
        })
        self._data[email] = data
        return data

    def remove(self, email):
        if email.lower() in self._data:
            del self._data[email.lower()]

    def get(self, email):
        return self._data.get(email.lower())

    def clean(self):
        now = time()
        delete = []
        for email, item in self._data.items():
            if not item:
                delete.append(email)
                continue
            created = item['created']
            # delete all older than 1 hour
            if int((now - created) / 60 / 60) > 1:
                delete.append(email)
        for code in delete:
            del self._data[code]
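
__init__ attaches the OOBTree to the context lazily, creating it only the first time the storage is used; makeRandomCode() is a project helper that is not shown above. The lazy-attach step on its own, with a throwaway context object:

from BTrees.OOBTree import OOBTree

class Context(object):
    pass

context = Context()
attr_name = '_registration_confirmations'
data = getattr(context, attr_name, None)
if data is None:
    data = OOBTree()
    setattr(context, attr_name, data)
data['user@example.org'] = {'code': 'abc123'}   # illustrative entry
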
Example #11
class FriendlyNameStorage(Persistent):
    implements(IFriendlyNameStorage)

    def __init__(self):
        self._forward = OOBTree()  # name -> uid
        self._reverse = OOBTree()  # uid -> name

    def add(self, uid, name):
        """ Map name -> uid. """
        store_name = name.lower()
        if store_name in self._forward:
            raise ValueError("%s already mapped" % name)
        if uid in self._reverse:
            raise ValueError("%s already has a friendly name" % uid)
        self._forward[store_name] = uid
        self._reverse[uid] = store_name

    def remove(self, uid):
        """ Remove mapping. This will be called when a folder is deleted,
            therefore we use the uid. """
        marker = object()
        name = self._reverse.get(uid, marker)
        if name is not marker:
            del(self._reverse[uid])
            try:
                del(self._forward[name])
            except KeyError:
                # If it isn't there, good, that is the outcome we wanted,
                # right?
                pass

    def get(self, name, _marker=None):
        """ Look up name, map uid to an object and return it. """
        return self._forward.get(name.lower(), _marker)

    def lookup(self, uid, _marker=None):
        """ Look up uid, return name. """
        return self._reverse.get(uid, _marker)

    def __getitem__(self, key):
        return self._forward.items()[key]

    def __len__(self):
        return len(self._forward)
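
A short usage sketch for the two mirrored BTrees, assuming the class above and its (unshown) imports are available; the uid and name are made up:

storage = FriendlyNameStorage()
storage.add('uid-123', 'Quarterly-Reports')
storage.get('quarterly-reports')   # -> 'uid-123' (lookups are lowercased)
storage.lookup('uid-123')          # -> 'quarterly-reports'
storage.remove('uid-123')          # clears both directions of the mapping
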
Example #12
class OOBTreeState(object):
    """Non-empty OOBTrees have a complicated tuple structure."""

    def __init__(self, type, state, tid):
        self.btree = OOBTree()
        self.btree.__setstate__(state)
        self.state = state
        # Large btrees have more than one bucket; we have to load old states
        # to all of them.  See BTreeTemplate.c and BucketTemplate.c for
        # docs of the pickled state format.
        while state and len(state) > 1:
            bucket = state[1]
            state = IObjectHistory(bucket).loadState(tid)
            # XXX this is dangerous!
            bucket.__setstate__(state)

        self._items = list(self.btree.items())
        self._dict = dict(self.btree)

        # now UNDO to avoid dangerous side effects,
        # see https://bugs.launchpad.net/zodbbrowser/+bug/487243
        state = self.state
        while state and len(state) > 1:
            bucket = state[1]
            state = IObjectHistory(bucket).loadState()
            bucket.__setstate__(state)

    def getError(self):
        return None

    def getName(self):
        return None

    def getParent(self):
        return None

    def listAttributes(self):
        return None

    def listItems(self):
        return self._items

    def asDict(self):
        return self._dict
Example #13
class CacheStore(Persistent):
    """
    basic persistent cache object
    
    see cache.txt
    """
    def __init__(self, id_):
        self.id = id_
        self.field={}
        self._cache = OOBTree()

    def __repr__(self):
        name = self.__class__.__name__
        name = "%s '%s'" %(name, self.id)
        return "%s %s :: %s" %(name, self.field, [x for x in self._cache.items()])

    def get(self, key, default=None):
        return self._cache.get(key, default)

    def set(self, key, value):
        self._cache[key] = value
        self._p_changed = True

    def getCache(self, key):
        subcache = self.field.get(key, _marker)
        if subcache is _marker:
            cache = Cache(parent=self, id_=self.id)
            self.field[key] = cache
            subcache = self.field[key]
            self._p_changed = True
        return subcache
        
    def remove(self, key):
        val = self._cache.get(key)
        if val:
            del self._cache[key]
            self._p_changed=True
            for name, fcache in self.field.items():
                for slug, uid in fcache.items():
                    if uid==key:
                        del fcache[slug]
Example #14
    def sort(self, result):
        index = self.index
        values = self.index.documents_to_values

        seq = OOBTree()
        for key in result:
            idx = values.get(key, None)
            if idx is None:
                continue

            data = seq.get(idx, None)
            if data is None:
                data = OOSet()
            data.insert(key)
            seq[idx] = data

        result = []
        for idx, data in seq.items():
            result.extend(data)

        return result
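
sort() buckets result keys by their indexed value in an OOBTree of OOSet objects and then walks the tree, which comes back in ascending value order. A self-contained sketch with a plain dict standing in for index.documents_to_values:

from BTrees.OOBTree import OOBTree, OOSet

documents_to_values = {'doc-a': 2, 'doc-b': 1, 'doc-c': 2}
seq = OOBTree()
for key, value in documents_to_values.items():
    bucket = seq.get(value)
    if bucket is None:
        bucket = seq[value] = OOSet()
    bucket.insert(key)

ordered = []
for value, bucket in seq.items():   # ascending by indexed value
    ordered.extend(bucket)          # 'doc-b', then 'doc-a' and 'doc-c'
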
Example #15
class Survey(Content, ContextACLMixin, LocalRolesMixin, TranslationMixin):
    type_title = _("Survey")
    type_name = "Survey"
    add_permission = ADD_SURVEY
    allow_anonymous_to_invite_themselves = False
#    allow_anonymous_to_start = False

    def __init__(self, **kw):
        self.tokens = OOBTree()
        super(Survey, self).__init__(**kw)

    def create_token(self, email, size = 15, hours = 0, overwrite = False):
        """ Create a survey invitation token."""
        if email not in self.tokens or (email in self.tokens and overwrite == True):
            token = None
            while token is None or token in self.tokens.values():
                token = Token(size = size, hours = hours)
            self.tokens[email] = token
        return self.tokens[email]

    def get_participants_data(self):
        """Returns the participants with statistics on the survey
        """
        participants = []
        for (email, uid) in self.tokens.items():
            participant = {} 
            participant['uid'] = uid
            participant['email'] = email
            response = 0
            questions = 0
            sections = [x for x in self.values() if ISurveySection.providedBy(x)]
            for section in sections:
                response += len(section.responses.get(uid, {}))
                questions += len(section.question_ids)
            if response != 0:
                participant['finished'] = Decimal(response) / Decimal(questions) * 100
            else:
                participant['finished'] = 0                
            participants.append(participant)
        return participants
Example #16
 def group(self, seq):
   sortIndex = self._sortIndex; sortReverse = self._sortReverse
   ns = len(seq); ni = len(sortIndex)
   if ns >= 0.1 * ni:
     # result large compared to index -- sort via index
     handled = IISet(); hn = 0
     _load = getattr(sortIndex, '_load', None)
     if _load is None:
       # not an optimized index
       items = sortIndex.items()
       
       _load = lambda (x1, x2): x2
       if sortReverse: items.reverse()
     elif sortReverse:
       gRO = getattr(sortIndex, 'getReverseOrder', None)
       items = gRO and gRO()
       if items is None:
         items = list(sortIndex._index.keys()); items.reverse()
     else: items = sortIndex._index.keys()
     for i in items:
       ids = intersection(seq, _load(i))
       if ids:
         handled.update(ids); hn += len(ids)
         yield i, ids
     if hn != len(seq): yield None, difference(seq, handled)
   else:
     # result relatively small -- sort via result
     keyFor = sortIndex.keyForDocument; m = OOBTree()
     noValue = IITreeSet()
     for doc in seq.keys():
       try: k = keyFor(doc)
       except KeyError: noValue.insert(doc); continue
       l = m.get(k)
       if l is None: l = m[k] = IITreeSet()
       l.insert(doc)
     items = m.items()
     if sortReverse: items = list(items); items.reverse()
     for i in items: yield i
     if noValue: yield None, noValue
Example #17
class PersistentSessionDataContainer(Persistent, IterableUserDict):
    """A SessionDataContainer that stores data in the ZODB"""
    __parent__ = __name__ = None

    implements(ISessionDataContainer, ILocalUtility, IAttributeAnnotatable)

    _v_last_sweep = 0 # Epoch time sweep last run

    def __init__(self):
        self.data = OOBTree()
        self.timeout = 1 * 60 * 60
        self.resolution = 50*60

    def __getitem__(self, pkg_id):
        """Retrieve an ISessionData

            >>> sdc = PersistentSessionDataContainer()

            >>> sdc.timeout = 60
            >>> sdc.resolution = 3
            >>> sdc['clientid'] = sd = SessionData()

        To ensure stale data is removed, we can wind
        back the clock using undocumented means...

            >>> sd.lastAccessTime = sd.lastAccessTime - 64
            >>> sdc._v_last_sweep = sdc._v_last_sweep - 4

        Now the data should be garbage collected

            >>> sdc['clientid']
            Traceback (most recent call last):
                [...]
            KeyError: 'clientid'

        Ensure lastAccessTime on the ISessionData is being updated
        occasionally. The ISessionDataContainer maintains this whenever
        the ISessionData is set or retrieved.

        lastAccessTime on the ISessionData is set when it is added
        to the ISessionDataContainer

            >>> sdc['client_id'] = sd = SessionData()
            >>> sd.lastAccessTime > 0
            True

        lastAccessTime is also updated whenever the ISessionData
        is retrieved through the ISessionDataContainer, at most
        once every 'resolution' seconds.

            >>> then = sd.lastAccessTime = sd.lastAccessTime - 4
            >>> now = sdc['client_id'].lastAccessTime
            >>> now > then
            True
            >>> time.sleep(1)
            >>> now == sdc['client_id'].lastAccessTime
            True

        Ensure lastAccessTime is not modified and no garbage collection
        occurs when timeout == 0. We test this by faking a stale
        ISessionData object.

            >>> sdc.timeout = 0
            >>> sd.lastAccessTime = sd.lastAccessTime - 5000
            >>> lastAccessTime = sd.lastAccessTime
            >>> sdc['client_id'].lastAccessTime == lastAccessTime
            True

        Next, we test session expiration functionality beyond transactions.

            >>> import transaction
            >>> from ZODB.DB import DB
            >>> from ZODB.DemoStorage import DemoStorage
            >>> sdc = PersistentSessionDataContainer()
            >>> sdc.timeout = 60
            >>> sdc.resolution = 3
            >>> db = DB(DemoStorage('test_storage'))
            >>> c = db.open()
            >>> c.root()['sdc'] = sdc
            >>> sdc['pkg_id'] = sd = SessionData()
            >>> sd['name'] = 'bob'
            >>> transaction.commit()

        Access immediately. the data should be accessible.

            >>> c.root()['sdc']['pkg_id']['name']
            'bob'

        Change the clock time and stale the session data.

            >>> sdc = c.root()['sdc']
            >>> sd = sdc['pkg_id']
            >>> sd.lastAccessTime = sd.lastAccessTime - 64
            >>> sdc._v_last_sweep = sdc._v_last_sweep - 4
            >>> transaction.commit()

        The data should be garbage collected.

            >>> c.root()['sdc']['pkg_id']['name']
            Traceback (most recent call last):
                [...]
            KeyError: 'pkg_id'

        Then abort transaction and access the same data again.
        The previous GC was cancelled, but deadline is over.
        The data should be garbage collected again.

            >>> transaction.abort()
            >>> c.root()['sdc']['pkg_id']['name']
            Traceback (most recent call last):
                [...]
            KeyError: 'pkg_id'

        """
        if self.timeout == 0:
            return IterableUserDict.__getitem__(self, pkg_id)

        now = time.time()

        # TODO: When scheduler exists, sweeping should be done by
        # a scheduled job since we are currently busy handling a
        # request and may end up doing simultaneous sweeps

        # If transaction is aborted after sweep. _v_last_sweep keep
        # incorrect sweep time. So when self.data is ghost, revert the time
        # to the previous _v_last_sweep time(_v_old_sweep).
        if self.data._p_state < 0:
            try:
                self._v_last_sweep = self._v_old_sweep
                del self._v_old_sweep
            except AttributeError:
                pass

        if self._v_last_sweep + self.resolution < now:
            self.sweep()
            if getattr(self, '_v_old_sweep', None) is None:
                self._v_old_sweep = self._v_last_sweep
            self._v_last_sweep = now

        rv = IterableUserDict.__getitem__(self, pkg_id)
        # Only update lastAccessTime once every few minutes, rather than
        # every hit, to avoid ZODB bloat and conflicts
        if rv.lastAccessTime + self.resolution < now:
            rv.lastAccessTime = int(now)
        return rv

    def __setitem__(self, pkg_id, session_data):
        """Set an ISessionPkgData

            >>> sdc = PersistentSessionDataContainer()
            >>> sad = SessionData()

        __setitem__ sets the ISessionData's lastAccessTime

            >>> sad.lastAccessTime
            0
            >>> sdc['1'] = sad
            >>> 0 < sad.lastAccessTime <= time.time()
            True

        We can retrieve the same object we put in

            >>> sdc['1'] is sad
            True

        """
        session_data.lastAccessTime = int(time.time())
        return IterableUserDict.__setitem__(self, pkg_id, session_data)

    def sweep(self):
        """Clean out stale data

            >>> sdc = PersistentSessionDataContainer()
            >>> sdc['1'] = SessionData()
            >>> sdc['2'] = SessionData()

        Wind back the clock on one of the ISessionData's
        so it gets garbage collected

            >>> sdc['2'].lastAccessTime -= sdc.timeout * 2

        Sweep should leave '1' and remove '2'

            >>> sdc.sweep()
            >>> sd1 = sdc['1']
            >>> sd2 = sdc['2']
            Traceback (most recent call last):
                [...]
            KeyError: '2'

        """
        # We only update the lastAccessTime every 'resolution' seconds.
        # To compensate for this, we factor in the resolution when
        # calculating the expiry time to ensure that we never remove
        # data that has been accessed within timeout seconds.
        expire_time = time.time() - self.timeout - self.resolution
        heap = [(v.lastAccessTime, k) for k,v in self.data.items()]
        heapify(heap)
        while heap:
            lastAccessTime, key = heappop(heap)
            if lastAccessTime < expire_time:
                del self.data[key]
            else:
                return
Example #18
class UnIndex(SimpleItem):

    """Simple forward and reverse index.
    """
    implements(ILimitedResultIndex, IUniqueValueIndex, ISortIndex)
    _counter = None

    def __init__(self, id, ignore_ex=None, call_methods=None,
                 extra=None, caller=None):
        """Create an unindex

        UnIndexes are indexes that contain two index components, the
        forward index (like plain index objects) and an inverted
        index.  The inverted index is so that objects can be unindexed
        even when the old value of the object is not known.

        e.g.

        self._index = {datum:[documentId1, documentId2]}
        self._unindex = {documentId:datum}

        The arguments are:

          'id' -- the name of the item attribute to index.  This is
          either an attribute name or a record key.

          'ignore_ex' -- should be set to true if you want the index
          to ignore exceptions raised while indexing instead of
          propagating them.

          'call_methods' -- should be set to true if you want the index
          to call the attribute 'id' (note: 'id' should be callable!)
          You will also need to pass in an object in the index and
          unindex methods for this to work.

          'extra' -- a mapping object that keeps additional
          index-related parameters - subitem 'indexed_attrs'
          can be string with comma separated attribute names or
          a list

          'caller' -- reference to the calling object (usually
          a (Z)Catalog instance)
        """

        def _get(o, k, default):
            """ return a value for a given key of a dict/record 'o' """
            if isinstance(o, dict):
                return o.get(k, default)
            else:
                return getattr(o, k, default)

        self.id = id
        self.ignore_ex = ignore_ex  # currently unimplemented
        self.call_methods = call_methods

        self.operators = ('or', 'and')
        self.useOperator = 'or'

        # allow index to index multiple attributes
        ia = _get(extra, 'indexed_attrs', id)
        if isinstance(ia, str):
            self.indexed_attrs = ia.split(',')
        else:
            self.indexed_attrs = list(ia)
        self.indexed_attrs = [
            attr.strip() for attr in self.indexed_attrs if attr]
        if not self.indexed_attrs:
            self.indexed_attrs = [id]

        self.clear()

    def __len__(self):
        return self._length()

    def getId(self):
        return self.id

    def clear(self):
        self._length = Length()
        self._index = OOBTree()
        self._unindex = IOBTree()
        self._counter = Length()

    def __nonzero__(self):
        return not not self._unindex

    def histogram(self):
        """Return a mapping which provides a histogram of the number of
        elements found at each point in the index.
        """
        histogram = {}
        for item in self._index.items():
            if isinstance(item, int):
                entry = 1  # "set" length is 1
            else:
                key, value = item
                entry = len(value)
            histogram[entry] = histogram.get(entry, 0) + 1
        return histogram

    def referencedObjects(self):
        """Generate a list of IDs for which we have referenced objects."""
        return self._unindex.keys()

    def getEntryForObject(self, documentId, default=_marker):
        """Takes a document ID and returns all the information we have
        on that specific object.
        """
        if default is _marker:
            return self._unindex.get(documentId)
        return self._unindex.get(documentId, default)

    def removeForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and remove any reference to documentId
        in its entry in the index.
        """
        indexRow = self._index.get(entry, _marker)
        if indexRow is not _marker:
            try:
                indexRow.remove(documentId)
                if not indexRow:
                    del self._index[entry]
                    self._length.change(-1)
            except ConflictError:
                raise
            except AttributeError:
                # index row is an int
                try:
                    del self._index[entry]
                except KeyError:
                    # XXX swallow KeyError because it was probably
                    # removed and then _length AttributeError raised
                    pass
                if isinstance(self.__len__, Length):
                    self._length = self.__len__
                    del self.__len__
                self._length.change(-1)
            except Exception:
                LOG.error('%s: unindex_object could not remove '
                          'documentId %s from index %s.  This '
                          'should not happen.' %
                          (self.__class__.__name__,
                           str(documentId), str(self.id)),
                          exc_info=sys.exc_info())
        else:
            LOG.error('%s: unindex_object tried to retrieve set %s '
                      'from index %s but couldn\'t.  This '
                      'should not happen.' %
                      (self.__class__.__name__,
                       repr(entry), str(self.id)))

    def insertForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and put it in the correct place
        in the forward index.

        This will also deal with creating the entire row if necessary.
        """
        indexRow = self._index.get(entry, _marker)

        # Make sure there's actually a row there already. If not, create
        # a set and stuff it in first.
        if indexRow is _marker:
            # We always use a set to avoid getting conflict errors on
            # multiple threads adding a new row at the same time
            self._index[entry] = IITreeSet((documentId, ))
            self._length.change(1)
        else:
            try:
                indexRow.insert(documentId)
            except AttributeError:
                # Inline migration: index row with one element was an int at
                # first (before Zope 2.13).
                indexRow = IITreeSet((indexRow, documentId))
                self._index[entry] = indexRow

    def index_object(self, documentId, obj, threshold=None):
        """ wrapper to handle indexing of multiple attributes """

        fields = self.getIndexSourceNames()
        res = 0
        for attr in fields:
            res += self._index_object(documentId, obj, threshold, attr)

        if res > 0:
            self._increment_counter()

        return res > 0

    def _index_object(self, documentId, obj, threshold=None, attr=''):
        """ index and object 'obj' with integer id 'documentId'"""
        returnStatus = 0

        # First we need to see if there's anything interesting to look at
        datum = self._get_object_datum(obj, attr)
        if datum is None:
            # Prevent None from being indexed. None doesn't have a valid
            # ordering definition compared to any other object.
            # BTrees 4.0+ will throw a TypeError
            # "object has default comparison" and won't let it be indexed.
            raise TypeError('None cannot be indexed.')

        # We don't want to do anything that we don't have to here, so we'll
        # check to see if the new and existing information is the same.
        oldDatum = self._unindex.get(documentId, _marker)
        if datum != oldDatum:
            if oldDatum is not _marker:
                self.removeForwardIndexEntry(oldDatum, documentId)
                if datum is _marker:
                    try:
                        del self._unindex[documentId]
                    except ConflictError:
                        raise
                    except Exception:
                        LOG.error('Should not happen: oldDatum was there, '
                                  'now its not, for document: %s' % documentId)

            if datum is not _marker:
                self.insertForwardIndexEntry(datum, documentId)
                self._unindex[documentId] = datum

            returnStatus = 1

        return returnStatus

    def _get_object_datum(self, obj, attr):
        # self.id is the name of the index, which is also the name of the
        # attribute we're interested in.  If the attribute is callable,
        # we'll do so.
        try:
            datum = getattr(obj, attr)
            if safe_callable(datum):
                datum = datum()
        except (AttributeError, TypeError):
            datum = _marker
        return datum

    def _increment_counter(self):
        if self._counter is None:
            self._counter = Length()
        self._counter.change(1)

    def getCounter(self):
        """Return a counter which is increased on index changes"""
        return self._counter is not None and self._counter() or 0

    def numObjects(self):
        """Return the number of indexed objects."""
        return len(self._unindex)

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return len(self)

    def unindex_object(self, documentId):
        """ Unindex the object with integer id 'documentId' and don't
        raise an exception if we fail
        """
        unindexRecord = self._unindex.get(documentId, _marker)
        if unindexRecord is _marker:
            return None

        self._increment_counter()

        self.removeForwardIndexEntry(unindexRecord, documentId)
        try:
            del self._unindex[documentId]
        except ConflictError:
            raise
        except Exception:
            LOG.debug('Attempt to unindex nonexistent document'
                      ' with id %s' % documentId, exc_info=True)

    def _apply_not(self, not_parm, resultset=None):
        index = self._index
        setlist = []
        for k in not_parm:
            s = index.get(k, None)
            if s is None:
                continue
            elif isinstance(s, int):
                s = IISet((s, ))
            setlist.append(s)
        return multiunion(setlist)

    def _convert(self, value, default=None):
        return value

    def _apply_index(self, request, resultset=None):
        """Apply the index to query parameters given in the request arg.

        The request argument should be a mapping object.

        If the request does not have a key which matches the "id" of
        the index instance, then None is returned.

        If the request *does* have a key which matches the "id" of
        the index instance, one of a few things can happen:

          - if the value is a blank string, None is returned (in
            order to support requests from web forms where
            you can't tell a blank string from empty).

          - if the value is a nonblank string, turn the value into
            a single-element sequence, and proceed.

          - if the value is a sequence, return a union search.

          - If the value is a dict and contains a key of the form
            '<index>_operator' this overrides the default method
            ('or') to combine search results. Valid values are "or"
            and "and".

        If None is not returned as a result of the abovementioned
        constraints, two objects are returned.  The first object is a
        ResultSet containing the record numbers of the matching
        records.  The second object is a tuple containing the names of
        all data fields used.

        FAQ answer:  to search a Field Index for documents that
        have a blank string as their value, wrap the request value
        up in a tuple ala: request = {'id':('',)}
        """
        record = parseIndexRequest(request, self.id, self.query_options)
        if record.keys is None:
            return None

        index = self._index
        r = None
        opr = None

        # not / exclude parameter
        not_parm = record.get('not', None)
        if not record.keys and not_parm:
            # convert into indexed format
            not_parm = map(self._convert, not_parm)
            # we have only a 'not' query
            record.keys = [k for k in index.keys() if k not in not_parm]
        else:
            # convert query arguments into indexed format
            record.keys = map(self._convert, record.keys)

        # experimental code for specifying the operator
        operator = record.get('operator', self.useOperator)
        if not operator in self.operators:
            raise RuntimeError("operator not valid: %s" % escape(operator))

        # Range parameter
        range_parm = record.get('range', None)
        if range_parm:
            opr = "range"
            opr_args = []
            if range_parm.find("min") > -1:
                opr_args.append("min")
            if range_parm.find("max") > -1:
                opr_args.append("max")

        if record.get('usage', None):
            # see if any usage params are sent to field
            opr = record.usage.lower().split(':')
            opr, opr_args = opr[0], opr[1:]

        if opr == "range":  # range search
            if 'min' in opr_args:
                lo = min(record.keys)
            else:
                lo = None
            if 'max' in opr_args:
                hi = max(record.keys)
            else:
                hi = None
            if hi:
                setlist = index.values(lo, hi)
            else:
                setlist = index.values(lo)

            # If we only use one key, intersect and return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))
                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result, (self.id,)

            if operator == 'or':
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                r = multiunion(tmp)
            else:
                # For intersection, sort with smallest data set first
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                if len(tmp) > 2:
                    setlist = sorted(tmp, key=len)
                else:
                    setlist = tmp
                r = resultset
                for s in setlist:
                    # the result is bound by the resultset
                    r = intersection(r, s)

        else:  # not a range search
            # Filter duplicates
            setlist = []
            for k in record.keys:
                if k is None:
                    raise TypeError('None cannot be in an index.')
                s = index.get(k, None)
                # If None, try to bail early
                if s is None:
                    if operator == 'or':
                        # If union, we can't possibly get a bigger result
                        continue
                    # If intersection, we can't possibly get a smaller result
                    return IISet(), (self.id,)
                elif isinstance(s, int):
                    s = IISet((s,))
                setlist.append(s)

            # If we only use one key return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))
                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result, (self.id,)

            if operator == 'or':
                # If we already get a small result set passed in, intersecting
                # the various indexes with it and doing the union later is
                # faster than creating a multiunion first.
                if resultset is not None and len(resultset) < 200:
                    smalllist = []
                    for s in setlist:
                        smalllist.append(intersection(resultset, s))
                    r = multiunion(smalllist)
                else:
                    r = multiunion(setlist)
            else:
                # For intersection, sort with smallest data set first
                if len(setlist) > 2:
                    setlist = sorted(setlist, key=len)
                r = resultset
                for s in setlist:
                    r = intersection(r, s)

        if isinstance(r, int):
            r = IISet((r, ))
        if r is None:
            return IISet(), (self.id,)
        if not_parm:
            exclude = self._apply_not(not_parm, resultset)
            r = difference(r, exclude)
        return r, (self.id,)

    def hasUniqueValuesFor(self, name):
        """has unique values for column name"""
        if name == self.id:
            return 1
        return 0

    def getIndexSourceNames(self):
        """ return sequence of indexed attributes """
        # BBB:  older indexes didn't have 'indexed_attrs'
        return getattr(self, 'indexed_attrs', [self.id])

    def getIndexQueryNames(self):
        """Indicate that this index applies to queries for the index's name."""
        return (self.id,)

    def uniqueValues(self, name=None, withLengths=0):
        """returns the unique values for name

        if withLengths is true, returns a sequence of
        tuples of (value, length)
        """
        if name is None:
            name = self.id
        elif name != self.id:
            raise StopIteration

        if not withLengths:
            for key in self._index.keys():
                yield key
        else:
            for key, value in self._index.items():
                if isinstance(value, int):
                    yield (key, 1)
                else:
                    yield (key, len(value))

    def keyForDocument(self, id):
        # This method is superseded by documentToKeyMap
        return self._unindex[id]

    def documentToKeyMap(self):
        return self._unindex

    def items(self):
        items = []
        for k, v in self._index.items():
            if isinstance(v, int):
                v = IISet((v,))
            items.append((k, v))
        return items
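
UnIndex keeps a forward index (value -> set of document ids), an inverted index (document id -> value) and a Length counter of distinct values. A compact sketch of that layout, not the actual Products.PluginIndexes implementation:

from BTrees.OOBTree import OOBTree
from BTrees.IOBTree import IOBTree
from BTrees.IIBTree import IITreeSet
from BTrees.Length import Length

_index = OOBTree()    # value -> IITreeSet of document ids
_unindex = IOBTree()  # document id -> value
_length = Length()    # number of distinct values

def index_doc(doc_id, value):
    row = _index.get(value)
    if row is None:
        _index[value] = IITreeSet((doc_id,))
        _length.change(1)
    else:
        row.insert(doc_id)
    _unindex[doc_id] = value

def unindex_doc(doc_id):
    value = _unindex.get(doc_id)
    if value is None:
        return
    del _unindex[doc_id]
    row = _index.get(value)
    if row is not None:
        row.remove(doc_id)
        if not row:              # last document for this value
            del _index[value]
            _length.change(-1)
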
Example #19
class BaseQuestion(ATCTContent):
    """Base class for survey questions"""
    immediate_view = "base_edit"
    global_allow = 0
    filter_content_types = 1
    allowed_content_types = ()
    include_default_actions = 1
    _at_rename_after_creation = True
    security = ClassSecurityInfo()

    @security.protected(ModifyPortalContent)
    def reset(self):
        """Remove answers for all users."""
        self.answers = OOBTree()

    @security.protected(ModifyPortalContent)
    def resetForUser(self, userid):
        """Remove answer for a single user"""
        if userid in self.answers:
            del self.answers[userid]

    @security.protected(View)
    def addAnswer(self, value, comments=""):
        """Add an answer and optional comments for a user.
        This method protects _addAnswer from anonymous users specifying a
        userid when they vote, and thus apparently voting as another user
        of their choice.
        """
        # Get hold of the parent survey
        survey = None
        ob = self
        while survey is None:
            ob = aq_parent(aq_inner(ob))
            if ob.meta_type == 'Survey':
                survey = ob
            elif getattr(ob, '_isPortalRoot', False):
                raise Exception("Could not find a parent Survey.")
        portal_membership = getToolByName(self, 'portal_membership')
        is_anon = portal_membership.isAnonymousUser()
        if is_anon and not survey.getAllowAnonymous():
            raise Unauthorized(
                "This survey is not available to anonymous users."
            )
        userid = self.getSurveyId()
        if is_anon and userid not in survey.getRespondentsList():
            # anon is not added on survey view, so may need to be added
            survey.addRespondent(userid)
        # Call the real method for storing the answer for this user.
        return self._addAnswer(userid, value, comments)

    def _addAnswer(self, userid, value, comments=""):
        """Add an answer and optional comments for a user."""
        # We don't let users over-write answers that they've already made.
        # Their first answer must be explicitly 'reset' before another
        # answer can be supplied.
        # XXX this causes problem when survey fails validation
        # will also cause problem with save function
#        if self.answers.has_key(userid):
#            # XXX Should this get raised?  If so, a more appropriate
#            # exception is probably in order.
#            msg = "User '%s' has already answered this question.
#                   Reset the original response to supply a new answer."
#            raise Exception(msg % userid)
#        else:
        self.answers[userid] = PersistentMapping(value=value,
                                                 comments=comments)
        if not isinstance(self.answers, (PersistentMapping, OOBTree)):
            # It must be a standard dictionary from an old install, so
            # we need to inform the ZODB about the change manually.
            self.answers._p_changed = 1

    @security.protected(View)
    def getAnswerFor(self, userid):
        """Get a specific user's answer"""
        answer = self.answers.get(userid, {}).get('value', None)
        if self.getInputType() in ['multipleSelect', 'checkbox']:
            if answer is None:
                return []
        if self.getInputType() in ['radio', 'selectionBox']:
            if not answer:
                return ""
            if isinstance(answer, unicode):
                answer = answer.encode('utf8')
            return str(answer)
        return answer

    @security.protected(View)
    def getCommentsFor(self, userid):
        """Get a specific user's comments"""
        return self.answers.get(userid, {}).get('comments', None)

    @security.protected(View)
    def getComments(self):
        """Return a userid, comments mapping"""
        mlist = []
        for k, v in self.answers.items():
            mapping = {}
            mapping['userid'] = k
            mapping['comments'] = v.get('comments', '')
            mlist.append(mapping)
        return mlist

    @security.protected(View)
    def getNumberOfRespondents(self):
        return len(self.answers.keys())

    @security.private
    def _get_yes_no_default(self):
        translation_service = getToolByName(self, 'translation_service')
        return (translation_service.utranslate(domain='plonesurvey',
                                               msgid=u'Yes',
                                               context=self),
                translation_service.utranslate(domain='plonesurvey',
                                               msgid=u'No',
                                               context=self), )

    @security.private
    def _get_commentLabel_default(self):
        translation_service = getToolByName(self, 'translation_service')
        return translation_service.utranslate(
            domain='plonesurvey',
            msgid=u'commentLabelDefault',
            default=u'Comment - mandatory if "no"', context=self)
Example #20
class ZODBRoleManager( BasePlugin ):

    """ PAS plugin for managing roles in the ZODB.
    """
    meta_type = 'ZODB Role Manager'

    security = ClassSecurityInfo()

    def __init__(self, id, title=None):

        self._id = self.id = id
        self.title = title

        self._roles = OOBTree()
        self._principal_roles = OOBTree()

    def manage_afterAdd( self, item, container ):

        self.addRole( 'Manager' )

        if item is self:
            role_holder = aq_parent( aq_inner( container ) )
            for role in getattr( role_holder, '__ac_roles__', () ):
                try:
                    if role not in ('Anonymous', 'Authenticated'):
                        self.addRole( role )
                except KeyError:
                    pass

    #
    #   IRolesPlugin implementation
    #
    security.declarePrivate( 'getRolesForPrincipal' )
    def getRolesForPrincipal( self, principal, request=None ):

        """ See IRolesPlugin.
        """
        result = list( self._principal_roles.get( principal.getId(), () ) )

        getGroups = getattr( principal, 'getGroups', lambda x: () )
        for group_id in getGroups():
            result.extend( self._principal_roles.get( group_id, () ) )

        return tuple( result )

    #
    #   IRoleEnumerationPlugin implementation
    #
    def enumerateRoles( self
                      , id=None
                      , exact_match=False
                      , sort_by=None
                      , max_results=None
                      , **kw
                      ):

        """ See IRoleEnumerationPlugin.
        """
        role_info = []
        role_ids = []
        plugin_id = self.getId()

        if isinstance( id, str ):
            id = [ id ]

        if exact_match and ( id ):
            role_ids.extend( id )

        if role_ids:
            role_filter = None

        else:   # Searching
            role_ids = self.listRoleIds()
            role_filter = _ZODBRoleFilter( id, **kw )

        for role_id in role_ids:

            if self._roles.get( role_id ):
                e_url = '%s/manage_roles' % self.getId()
                p_qs = 'role_id=%s' % role_id
                m_qs = 'role_id=%s&assign=1' % role_id

                info = {}
                info.update( self._roles[ role_id ] )

                info[ 'pluginid' ] = plugin_id
                info[ 'properties_url'  ] = '%s?%s' % (e_url, p_qs)
                info[ 'members_url'  ] = '%s?%s' % (e_url, m_qs)

                if not role_filter or role_filter( info ):
                    role_info.append( info )

        return tuple( role_info )

    #
    #   IRoleAssignerPlugin implementation
    #
    security.declarePrivate( 'doAssignRoleToPrincipal' )
    def doAssignRoleToPrincipal( self, principal_id, role ):
        return self.assignRoleToPrincipal( role, principal_id )

    #
    #   Role management API
    #
    security.declareProtected( ManageUsers, 'listRoleIds' )
    def listRoleIds( self ):

        """ Return a list of the role IDs managed by this object.
        """
        return self._roles.keys()

    security.declareProtected( ManageUsers, 'listRoleInfo' )
    def listRoleInfo( self ):

        """ Return a list of the role mappings.
        """
        return self._roles.values()

    security.declareProtected( ManageUsers, 'getRoleInfo' )
    def getRoleInfo( self, role_id ):

        """ Return a role mapping.
        """
        return self._roles[ role_id ]

    security.declareProtected( ManageUsers, 'addRole' )
    def addRole( self, role_id, title='', description='' ):

        """ Add 'role_id' to the list of roles managed by this object.

        o Raise KeyError on duplicate.
        """
        if self._roles.get( role_id ) is not None:
            raise KeyError('Duplicate role: %s' % role_id)

        self._roles[ role_id ] = { 'id' : role_id
                                 , 'title' : title
                                 , 'description' : description
                                 }

    security.declareProtected( ManageUsers, 'updateRole' )
    def updateRole( self, role_id, title, description ):

        """ Update title and description for the role.

        o Raise KeyError if not found.
        """
        self._roles[ role_id ].update( { 'title' : title
                                       , 'description' : description
                                       } )

    security.declareProtected( ManageUsers, 'removeRole' )
    def removeRole( self, role_id ):

        """ Remove 'role_id' from the list of roles managed by this object.

        o Raise KeyError if not found.
        """
        for principal_id in self._principal_roles.keys():
            self.removeRoleFromPrincipal( role_id, principal_id )

        del self._roles[ role_id ]

    #
    #   Role assignment API
    #
    security.declareProtected( ManageUsers, 'listAvailablePrincipals' )
    def listAvailablePrincipals( self, role_id, search_id ):

        """ Return a list of principal IDs to whom a role can be assigned.

        o If supplied, 'search_id' constrains the principal IDs;  if not,
          return empty list.

        o Omit principals with existing assignments.
        """
        result = []

        if search_id:  # don't bother searching if no criteria

            parent = aq_parent( self )

            for info in parent.searchPrincipals( max_results=20
                                               , sort_by='id'
                                               , id=search_id
                                               , exact_match=False
                                               ):
                id = info[ 'id' ]
                title = info.get( 'title', id )
                if ( role_id not in self._principal_roles.get( id, () )
                 and role_id != id ):
                    result.append( ( id, title ) )

        return result

    security.declareProtected( ManageUsers, 'listAssignedPrincipals' )
    def listAssignedPrincipals( self, role_id ):

        """ Return a list of principal IDs to whom a role is assigned.
        """
        result = []

        for k, v in self._principal_roles.items():
            if role_id in v:
                # there should be at most one principal matching 'k'

                parent = aq_parent( self )
                info = parent.searchPrincipals( id=k, exact_match=True )
                assert( len( info ) in ( 0, 1 ) )
                if len( info ) == 0:
                    title = '<%s: not found>' % k
                else:
                    title = info[0].get( 'title', k )
                result.append( ( k, title ) )

        return result

    security.declareProtected( ManageUsers, 'assignRoleToPrincipal' )
    def assignRoleToPrincipal( self, role_id, principal_id ):

        """ Assign a role to a principal (user or group).

        o Return a boolean indicating whether a new assignment was created.

        o Raise KeyError if 'role_id' is unknown.
        """
        role_info = self._roles[ role_id ] # raise KeyError if unknown!

        current = self._principal_roles.get( principal_id, () )
        already = role_id in current

        if not already:
            new = current + ( role_id, )
            self._principal_roles[ principal_id ] = new

        return not already

    security.declareProtected( ManageUsers, 'removeRoleFromPrincipal' )
    def removeRoleFromPrincipal( self, role_id, principal_id ):

        """ Remove a role from a principal (user or group).

        o Return a boolean indicating whether the role was already present.

        o Raise KeyError if 'role_id' is unknown.

        o Ignore requests to remove a role not already assigned to the
          principal.
        """
        role_info = self._roles[ role_id ] # raise KeyError if unknown!

        current = self._principal_roles.get( principal_id, () )
        new = tuple( [ x for x in current if x != role_id ] )
        already = current != new

        if already:
            self._principal_roles[ principal_id ] = new

        return already

    #
    #   ZMI
    #
    manage_options = ( ( { 'label': 'Roles', 
                           'action': 'manage_roles', }
                         ,
                       )
                     + BasePlugin.manage_options
                     )

    security.declareProtected( ManageUsers, 'manage_roles' )
    manage_roles = PageTemplateFile( 'www/zrRoles'
                                   , globals()
                                   , __name__='manage_roles'
                                   )

    security.declareProtected( ManageUsers, 'manage_twoLists' )
    manage_twoLists = PageTemplateFile( '../www/two_lists'
                                      , globals()
                                      , __name__='manage_twoLists'
                                      )

    security.declareProtected( ManageUsers, 'manage_addRole' )
    def manage_addRole( self
                      , role_id
                      , title
                      , description
                      , RESPONSE
                      ):
        """ Add a role via the ZMI.
        """
        self.addRole( role_id, title, description )

        message = 'Role+added'

        RESPONSE.redirect( '%s/manage_roles?manage_tabs_message=%s'
                         % ( self.absolute_url(), message )
                         )

    security.declareProtected( ManageUsers, 'manage_updateRole' )
    def manage_updateRole( self
                         , role_id
                         , title
                         , description
                         , RESPONSE
                         ):
        """ Update a role via the ZMI.
        """
        self.updateRole( role_id, title, description )

        message = 'Role+updated'

        RESPONSE.redirect( '%s/manage_roles?role_id=%s&manage_tabs_message=%s'
                         % ( self.absolute_url(), role_id, message )
                         )

    security.declareProtected( ManageUsers, 'manage_removeRoles' )
    def manage_removeRoles( self
                          , role_ids
                          , RESPONSE
                          ):
        """ Remove one or more roles via the ZMI.
        """
        role_ids = filter( None, role_ids )

        if not role_ids:
            message = 'no+roles+selected'

        else:

            for role_id in role_ids:
                self.removeRole( role_id )

            message = 'Roles+removed'

        RESPONSE.redirect( '%s/manage_roles?manage_tabs_message=%s'
                         % ( self.absolute_url(), message )
                         )

    security.declareProtected( ManageUsers, 'manage_assignRoleToPrincipals' )
    def manage_assignRoleToPrincipals( self
                                     , role_id
                                     , principal_ids
                                     , RESPONSE
                                     ):
        """ Assign a role to one or more principals via the ZMI.
        """
        assigned = []

        for principal_id in principal_ids:
            if self.assignRoleToPrincipal( role_id, principal_id ):
                assigned.append( principal_id )

        if not assigned:
            message = 'Role+%s+already+assigned+to+all+principals' % role_id
        else:
            message = 'Role+%s+assigned+to+%s' % ( role_id
                                                 , '+'.join( assigned )
                                                 )

        RESPONSE.redirect( ( '%s/manage_roles?role_id=%s&assign=1'
                           + '&manage_tabs_message=%s'
                           ) % ( self.absolute_url(), role_id, message )
                         )

    security.declareProtected( ManageUsers, 'manage_removeRoleFromPrincipals' )
    def manage_removeRoleFromPrincipals( self
                                       , role_id
                                       , principal_ids
                                       , RESPONSE
                                       ):
        """ Remove a role from one or more principals via the ZMI.
        """
        removed = []

        for principal_id in principal_ids:
            if self.removeRoleFromPrincipal( role_id, principal_id ):
                removed.append( principal_id )

        if not removed:
            message = 'Role+%s+already+removed+from+all+principals' % role_id
        else:
            message = 'Role+%s+removed+from+%s' % ( role_id
                                                  , '+'.join( removed )
                                                  )

        RESPONSE.redirect( ( '%s/manage_roles?role_id=%s&assign=1'
                           + '&manage_tabs_message=%s'
                           ) % ( self.absolute_url(), role_id, message )
                         )
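
#
#   Usage sketch (illustrative only; not part of the plugin)
#
# A minimal, standalone sketch of the storage scheme ZODBRoleManager relies on:
# one OOBTree mapping role_id -> an info dict, and another mapping principal_id
# -> a tuple of role ids.  The helper names below are assumptions for the sketch
# only; they are not the plugin's API and none of the PAS wiring is shown.
from BTrees.OOBTree import OOBTree

_sketch_roles = OOBTree()             # role_id -> {'id', 'title', 'description'}
_sketch_principal_roles = OOBTree()   # principal_id -> (role_id, ...)

def sketch_add_role(role_id, title='', description=''):
    # Mirrors addRole(): refuse duplicates, store a small mapping per role.
    if _sketch_roles.get(role_id) is not None:
        raise KeyError('Duplicate role: %s' % role_id)
    _sketch_roles[role_id] = {'id': role_id, 'title': title,
                              'description': description}

def sketch_assign_role(role_id, principal_id):
    # Mirrors assignRoleToPrincipal(): KeyError for unknown roles, then extend
    # the principal's tuple of role ids only if the role is not already there.
    _sketch_roles[role_id]   # raise KeyError if the role is unknown
    current = _sketch_principal_roles.get(principal_id, ())
    if role_id in current:
        return False
    _sketch_principal_roles[principal_id] = current + (role_id,)
    return True

sketch_add_role('Manager', title='Manager')
sketch_assign_role('Manager', 'some_user_id')
print(_sketch_principal_roles['some_user_id'])   # ('Manager',)
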
class ShibbolethHelper(BasePlugin):
    '''Multi-plugin for Shibboleth authentication.
    '''

    meta_type = 'Shibboleth Helper'

    security = ClassSecurityInfo()

    manage_options = ( BasePlugin.manage_options +
                       ( { 'label': 'Configuration',
                           'action': 'manage_shibbolethhelper',
                           'help':('jcu.shibboleth.pas','manage_shibbolethhelper.stx')}
                         ,
                       ) +
                       ( { 'label': 'Map Roles',
                           'action': 'manage_roles',
                           'help':('jcu.shibboleth.pas','manage_mapping.stx')}
                         ,
                       ) +
                       ( { 'label': 'Map Groups',
                           'action': 'manage_groups',
                           'help':('jcu.shibboleth.pas','manage_mapping.stx')}
                         ,
                       ) +
                       ( { 'label': 'Import/Export',
                           'action': 'manage_importexport',
                           'help':('jcu.shibboleth.pas','manage_mapping.stx')}
                         ,
                       )
                     )

    _op_switch = None

    def __init__(self, id, title=None, total_shib=False):
        super(ShibbolethHelper, self).__init__()
        self._id = self.id = id
        self.title = title
        self.total_shib = total_shib
        self.log(INFO,'Initializing Shibboleth Authentication.')
        self.login_path = "login"
        self.role_mapping =  PersistentMapping()
        self.log(INFO,'Role Mapping. %s' % self.role_mapping)
        self.group_mapping =  PersistentMapping()
        self.log(INFO,'Group Mapping. %s' % self.group_mapping)
        self._mapping_map = {Constants.RoleM: self.role_mapping, Constants.GroupM:self.group_mapping}
        self.__setup_compiled_func_map()

        # Shibboleth attributes store
        self.store = OOBTree()

        # Shibboleth attributes map
        self.attr_map = OOBTree()
        self.rattr_map = OOBTree()

        # Default Values for attribute map
        self.attr_map['HTTP_DISPLAYNAME'] = 'fullname'
        self.attr_map['HTTP_MAIL'] = 'email'
        self.rattr_map['fullname'] = 'HTTP_DISPLAYNAME'
        self.rattr_map['fullname_fallback'] = 'HTTP_CN'
        self.rattr_map['email'] = 'HTTP_MAIL'

        #Properties for the Property Manager.
        self.max_brackets = 6
        self.userid_attribute = 'HTTP_SHARED_TOKEN'
        self.idp_attribute = 'HTTP_SHIB_IDENTITY_PROVIDER'
        self.shibboleth_config_dir = '/etc/shibboleth'
        self.sso_url = '/Shibboleth.sso/DS'


    def __setup_compiled_func_map(self):
        self._v_compiled_mapping_func_map = {}
        for i in self._mapping_map:
            self._v_compiled_mapping_func_map[i] = {}

    #
    #   IAuthenticationPlugin implementation
    #
    security.declarePrivate('authenticateCredentials')
    def authenticateCredentials(self, credentials):
        """Authenticate Credentials
        """
        if not credentials.has_key('shibboleth.session'):
            log.debug("Will only authenticate Shibboleth credentials.")
            return None

        session_id = credentials.get('shibboleth.session')
        log.debug('Authentication Requested.')
        url = self.getLoginURL()
        request = self.REQUEST
        log.debug("URLS: %s, %s" % (request.URL, url))
        if request.URL == url:
            log.debug("Not attempting to authenticate login request.")
            return None

        if credentials['shibboleth.id'] == credentials['shibboleth.session']:
            login = "******" % credentials['shibboleth.id']
            return (self.prefix + session_id, login)

        login = credentials.get('shibboleth.id')
        return (self.prefix + login, login)


    #
    #   IChallengePlugin implementation
    #
    security.declarePrivate('challenge')
    def challenge(self, request, response):
        """The Challange
        """
        req = self.REQUEST
        resp = req['RESPONSE']

        self.log(INFO, "Challange.")
        url = self.getLoginURL()
        came_from = req.get('URL', '')
        query = req.get('QUERY_STRING')
        if query:
            if not query.startswith('?'):
                query = '?' + query
            came_from = came_from + query

        shibSessionId = self.__getShibbolethSessionId(request)
        if not shibSessionId:
            resp.redirect("%s?came_from=%s" % (url, came_from), lock=1)
            return True

        return False

    #
    #    ICredentialsResetPlugin implementation
    #
    security.declarePrivate('resetCredentials')
    def resetCredentials(self, request, response):
        """
            >>> from Products.PluggableAuthService.interfaces.plugins import \
                    ICredentialsResetPlugin
            >>> plugins = self.uf.plugins
            >>> plugins.activatePlugin(ICredentialsResetPlugin, 'shib')

            >>> print self.shib

        Expire the _shibsession_XXX cookie here to enable users to log
        out correctly.
        """
        for cookie in request.cookies:
            if 'shibsession' in cookie:
                response.expireCookie(cookie, path='/')

    #
    #    IDeleteCapability implementation
    #
    security.declarePublic('allowDeletePrincipal')
    def allowDeletePrincipal(self, principal_id):
        """
            >>> self.shib.store = {'B_0-_88s2CiUXmJx-PYW_8TugZI': {
            ...     u'HTTP_SHARED_TOKEN': 'B_0-_88s2CiUXmJx-PYW_8TugZI',
            ...     u'HTTP_CN': 'David B',
            ...     u'HTTP_MAIL': '*****@*****.**',
            ...     u'HTTP_REMOTE_USER':'******',
            ...     }
            ... }
            >>> self.shib.allowDeletePrincipal('B_0-_88s2CiUXmJx-PYW_8TugZI')
            1

        True iff this plugin can delete a certain user/group.
        This is true if this plugin manages the user.
        """
        if principal_id in self.store:
            return 1
        return 0

    #
    #   IUserManagement implementation
    #
    security.declarePrivate('doChangeUser')
    def doChangeUser(self, principal_id, password, **kw):
        """We don't change users.
        """
        pass

    security.declarePrivate('doDeleteUser')
    def doDeleteUser(self, principal_id):
        """
            >>> from Products.PluggableAuthService.interfaces.plugins import \
                    IPropertiesPlugin, IUserEnumerationPlugin
            >>> from Products.PlonePAS.interfaces.plugins import \
                    IUserManagement
            >>> plugins = self.uf.plugins
            >>> plugins.activatePlugin(IPropertiesPlugin, 'shib')
            >>> plugins.activatePlugin(IUserEnumerationPlugin, 'shib')
            >>> plugins.activatePlugin(IUserManagement, 'shib')

            >>> self.shib.store = {'B_0-_88s2CiUXmJx-PYW_8TugZI': {
            ...     u'HTTP_SHARED_TOKEN': 'B_0-_88s2CiUXmJx-PYW_8TugZI',
            ...     u'HTTP_CN': 'David B',
            ...     u'HTTP_MAIL': '*****@*****.**',
            ...     u'HTTP_REMOTE_USER':'******',
            ...     }
            ... }
            >>> u = self.app.acl_users.getUser('B_0-_88s2CiUXmJx-PYW_8TugZI')
            >>> print u
            B_0-_88s2CiUXmJx-PYW_8TugZI

            >>> self.app.acl_users.doDeleteUser('B_0-_88s2CiUXmJx-PYW_8TugZI')

            >>> 'B_0-_88s2CiUXmJx-PYW_8TugZI' in self.shib.store

        Given a Shibboleth ID (shared token, typically), delete that user
        """
        if not self.allowDeletePrincipal(principal_id):
            raise KeyError('Invalid user ID: %s' % principal_id)

        del self.store[principal_id]
        return True

    #
    #    IExtractionPlugin implementation
    #
    security.declarePrivate('extractCredentials')
    def extractCredentials(self, request):
        """Extract the credentials
        """
        session = request.SESSION
        if session.has_key('shibboleth.session') and session.has_key('shibboleth.id'):
            return {"shibboleth.session": session.get('shibboleth.session'),
                    "shibboleth.id": session.get('shibboleth.id')}

        session_id = self.__getShibbolethSessionId(request)
        self.log(DEBUG, "extractCredentials: %s" % session_id)
        if not session_id:
            self.log(DEBUG, "extractCredentials: Not Shib")
            return {}

        id, attributes = self.__extract_shib_data(request)
        session['shibboleth.id'] = id
        # if not Pseudo-Anonymous, store the user's details
        if id != session_id:
            # Store the user's attributes in the tool and in the user's session
            self.store[id] = attributes

        # set session-level variables so that we won't need to re-authenticate
        # on every request
        session.set('shibboleth.session', session_id)
        session.set('shibboleth.id', id)

        # Don't return login/password because no other plugin can help with authentication
        return {"shibboleth.session": session_id, "shibboleth.id": id}


    #
    #    IRolesPlugin implementation
    #
    security.declarePrivate('getRolesForPrincipal')
    def getRolesForPrincipal(self, principal, request=None):
        """

            >>> from zope.publisher.browser import TestRequest
            >>> shib_headers = { \
              'HTTP_REMOTE_USER': '******', \
              'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", }
            >>> request = TestRequest(**shib_headers)
            >>> request.SESSION = self.app.session_data_manager.getSessionData()
            >>> self.app.acl_users.shib.REQUEST.environ.update({'HTTP_REMOTE_USER': '******'})
            >>> self.shib.REQUEST.SESSION = self.app.session_data_manager.getSessionData()
            >>> from Products.PluggableAuthService.plugins.tests.helpers import DummyUser
            >>> self.shib.getRolesForPrincipal(DummyUser('*****@*****.**'), request)
            ()
        """
        self.log(INFO, "Principal: %s"%principal)
        if not hasattr(self,'_v_compiled_mapping_func_map'):
            self.__compileMappings()
        return self.__caculateMapping(principal.getId(), self._v_compiled_mapping_func_map[Constants.RoleM])


    #
    #    IGroupsPlugin implementation
    #
    security.declarePrivate('getGroupsForPrincipal')
    def getGroupsForPrincipal(self, principal, request=None):
        """

            >>> from zope.publisher.browser import TestRequest
            >>> shib_headers = { \
              'HTTP_REMOTE_USER': '******', \
              'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", }
            >>> request = TestRequest(**shib_headers)
            >>> request.SESSION = self.app.session_data_manager.getSessionData()
            >>> self.app.acl_users.shib.REQUEST.environ.update({'HTTP_REMOTE_USER': '******'})
            >>> self.shib.REQUEST.SESSION = self.app.session_data_manager.getSessionData()
            >>> from Products.PluggableAuthService.plugins.tests.helpers import DummyUser
            >>> self.shib.getGroupsForPrincipal(DummyUser('*****@*****.**'), request)
            ()
        """
        if not hasattr(self,'_v_compiled_mapping_func_map'):
            self.__compileMappings()
        return self.__caculateMapping(principal.getId(), self._v_compiled_mapping_func_map[Constants.GroupM])


    #
    #   IUserEnumerationPlugin implementation
    #
    security.declarePrivate('enumerateUsers')
    def enumerateUsers(self, id=None, login=None, exact_match=False, sort_by=None, max_results=None, **kw):
        """ See IUserEnumerationPlugin.
        """
        user_info = []
        user_ids = []
        plugin_id = self.getId()

        if isinstance( id, basestring ):
            id = [id]

        if isinstance( login, basestring ):
            login = [login]

        if not user_ids:
            user_ids = self.listUserIds()
            user_filter = _ShibUserFilter(id, login, exact_match, self.rattr_map, **kw)

        if not id and not login and not kw:
            user_filter = None

        for user_id in user_ids:
            data = self.store.get(user_id)
            if data:
                e_url = '%s/manage_users' % self.getId()
                qs = 'user_id=%s' % user_id

                fullname = data.get(self.rattr_map.get('fullname')) \
                        or data.get(self.rattr_map.get('fullname_fallback'), \
                                                       user_id)
                info = { 'id' : self.prefix + user_id
                       , 'login' : user_id
                       , 'pluginid' : plugin_id
                       , 'email' : data.get(self.rattr_map.get('email'), '')
                       , 'title' : fullname
                       , 'description' : fullname
                       , 'editurl' : '%s?%s' % (e_url, qs)
                       }
                if not user_filter or user_filter(user_id, user_id, data):
                    user_info.append(info)

        return tuple(user_info)


    #
    #   IPropertiesPlugin implementation
    #
    security.declarePrivate('getPropertiesForUser')
    def getPropertiesForUser(self, user, request=None ):
        """return the immutabel shibboleth properties of a user

            >>> from Products.PluggableAuthService.interfaces.plugins import \
                    IPropertiesPlugin, IUserEnumerationPlugin
            >>> plugins = self.uf.plugins
            >>> plugins.activatePlugin(IPropertiesPlugin, 'shib')
            >>> plugins.activatePlugin(IUserEnumerationPlugin, 'shib')

            >>> self.shib.store = {'matthew': {u'HTTP_DISPLAYNAME': 'Matthew Morgan', u'HTTP_MAIL': '*****@*****.**', u'HTTP_REMOTE_USER': '******',}}
            >>> u = self.app.acl_users.getUser('matthew')
            >>> u.listPropertysheets()
            ['shib']
            >>> prop = u.getPropertysheet('shib')
            >>> print prop.propertyItems()
            [('email', '*****@*****.**'), ('fullname', 'Matthew Morgan'), ('location', None)]

            Test for an instance where IdP doesn't provide our display name

            >>> self.shib.store = {'david': {
            ...     u'HTTP_CN': 'David B',
            ...     u'HTTP_MAIL': '*****@*****.**',
            ...     u'HTTP_REMOTE_USER':'******',
            ...     }
            ... }
            >>> u = self.app.acl_users.getUser('david')
            >>> u.listPropertysheets()
            ['shib']
            >>> prop = u.getPropertysheet('shib')
            >>> print prop.propertyItems()
            [('email', '*****@*****.**'), ('fullname', 'David B'), ('location', None)]

            Test for instance where our display name should take precedence

            >>> self.shib.store = {'david': {
            ...     u'HTTP_DISPLAYNAME': 'Real Name',
            ...     u'HTTP_CN': 'Override me',
            ...     u'HTTP_MAIL': '*****@*****.**',
            ...     u'HTTP_REMOTE_USER':'******',
            ...     }
            ... }
            >>> u = self.app.acl_users.getUser('david')
            >>> u.listPropertysheets()
            ['shib']
            >>> prop = u.getPropertysheet('shib')
            >>> print prop.propertyItems()
            [('email', '*****@*****.**'), ('fullname', 'Real Name'), ('location', None)]

            Test for the situation where there aren't any names defined at all.
            >>> self.shib.store = {'david': {
            ...     u'HTTP_MAIL': '*****@*****.**',
            ...     u'HTTP_REMOTE_USER':'******',
            ...     }
            ... }
            >>> u = self.app.acl_users.getUser('david')
            >>> prop = u.getPropertysheet('shib')
            >>> print prop.propertyItems()
            [('email', '*****@*****.**'), ('fullname', 'Unknown (Name not supplied)'), ('location', None)]


            test missing shibboleth attribute

            >>> self.shib.store = {'matthew': {u'HTTP_MAIL': '*****@*****.**', u'HTTP_REMOTE_USER': '******', u'HTTP_DISPLAYNAME': 'Matthew Morgan'}}
            >>> u = self.app.acl_users.getUser('matthew')
            >>> u.listPropertysheets()
            ['shib']
        """
        userdata = self.store.get(user.getId())
        schema = [('email', 'string'),
                  ('fullname', 'string'),
                  ('location', 'string'),
                 ]
        data = {}
        if not userdata:
            return None
        for k, v in self.attr_map.items():
            if userdata.has_key(k):
                data[v] = userdata[k]
            elif v == 'fullname':
                data[v] = userdata.get('HTTP_CN', \
                                       'Unknown (Name not supplied)')

        return UserPropertySheet(self.id, schema=schema, **data)


    #
    # IUserIntrospection implementation
    #

    def getUserIds(self):
        """
        Return a list of user ids
        """
        return self.listUserIds()

    def getUserNames(self):
        """
        Return a list of usernames
        """
        return [x['login_name'] for x in self.listUserInfo()]

    def getUsers(self):
        """
        Return a list of users
        """
        uf = self.acl_users
        return [uf.getUserById(x) for x in self.getUserIds()]


    security.declarePrivate('getLoginURL')
    def getLoginURL(self):
        """ Where to send people for logging in """
        if self.login_path.startswith('/'):
            return self.login_path
        elif self.login_path != '':
            return '%s/%s' % (self.absolute_url(), self.login_path)
        else:
            return None


    #
    #   (notional)IZODBUserManager interface
    #
    security.declareProtected(ManageUsers, 'listUserIds')
    def listUserIds(self):

        """ -> ( user_id_1, ... user_id_n )
        """
        return self.store.keys()


    security.declarePrivate('__extract_shib_data')
    def __extract_shib_data(self, request):
        """
        Extract Shibboleth information from the request headers. Return a tuple
        containing the unique identifier of the user and a dict of the other
        Shibboleth headers.

            >>> from zope.publisher.browser import TestRequest
            >>> shib_headers = { 'HTTP_SHIB_APPLICATION_ID' : "default", \
              'HTTP_SHIB_AUTHENTICATION_INSTANT' : "2010-02-18T22:46:12.140Z", \
              'HTTP_SHIB_AUTHENTICATION_METHOD' : "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport", \
              'HTTP_SHIB_AUTHNCONTEXT_CLASS' : "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport", \
              'HTTP_SHIB_IDENTITY_PROVIDER' : "https://idp.test.org/idp/shibboleth", \
              'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", \
              'HTTP_AFFILIATION' : "*****@*****.**", \
              'HTTP_ASSURANCE' : "2", \
              'HTTP_CN' : "Russell Sim", \
              'HTTP_DISPLAYNAME' : "Russell Sim", \
              'HTTP_EPPN' : "*****@*****.**", \
              'HTTP_GIVENNAME' : "Russell", \
              'HTTP_HOMEORGANIZATION' : "vpac.org", \
              'HTTP_HOMEORGANIZATIONTYPE' : "urn:mace:aaf.edu.au:marcs", \
              'HTTP_L' : "AU", \
              'HTTP_MAIL' : "*****@*****.**", \
              'HTTP_O' : "VPAC", \
              'HTTP_PERSISTENT_ID' : "https://idp.test.org/idp/shibboleth!https://testhost.com/shibboleth!P4o6lbbg41Q=", \
              'HTTP_SHARED_TOKEN' : "B_0-_88s2CiUXmJx-PYW_8TugZI", \
              'HTTP_SN' : "Sim", \
              'HTTP_UNSCOPED_AFFILIATION' : "staff", }
            >>> request = TestRequest(**shib_headers)
            >>> self.shib._ShibbolethHelper__extract_shib_data(request)
              ('B_0-_88s2CiUXmJx-PYW_8TugZI', \
              {u'HTTP_PERSISTENT_ID': 'https://idp.test.org/idp/shibboleth!https://testhost.com/shibboleth!P4o6lbbg41Q=', \
              u'HTTP_SN': 'Sim', \
              u'HTTP_MAIL': '*****@*****.**', \
              u'HTTP_SHIB_APPLICATION_ID': 'default', \
              u'HTTP_EPPN': '*****@*****.**', \
              u'HTTP_GIVENNAME': 'Russell', \
              u'HTTP_SHIB_AUTHNCONTEXT_CLASS': 'urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport', \
              u'HTTP_CN': 'Russell Sim', \
              u'HTTP_O': 'VPAC', \
              u'HTTP_L': 'AU', \
              u'HTTP_UNSCOPED_AFFILIATION': 'staff', \
              u'HTTP_DISPLAYNAME': 'Russell Sim', \
              u'HTTP_AFFILIATION': '*****@*****.**', \
              u'HTTP_SHIB_AUTHENTICATION_INSTANT': '2010-02-18T22:46:12.140Z'})

            >>> shib_headers = { 'HTTP_SHIB_APPLICATION_ID' : "default", \
              'HTTP_SHIB_AUTHENTICATION_INSTANT' : "2010-02-18T22:46:12.140Z", \
              'HTTP_SHIB_AUTHENTICATION_METHOD' : "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport", \
              'HTTP_SHIB_AUTHNCONTEXT_CLASS' : "urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport", \
              'HTTP_SHIB_IDENTITY_PROVIDER' : "https://idp.test.org/idp/shibboleth", \
              'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", \
              'HTTP_PERSISTENT_ID' : "https://idp.test.org/idp/shibboleth!https://testhost.com/shibboleth!P4o6lbbg41Q=", \
              'HTTP_DISPLAYNAME' : "Russell Sim", \
              'HTTP_REMOTE_USER': '******'}
            >>> request = TestRequest(**shib_headers)
            >>> self.shib._ShibbolethHelper__extract_shib_data(request)
              ('_9c86b438e92e1de9b378a23f4838a959', \
              {u'HTTP_PERSISTENT_ID': 'https://idp.test.org/idp/shibboleth!https://testhost.com/shibboleth!P4o6lbbg41Q=', \
              u'HTTP_REMOTE_USER': '******', \
              u'HTTP_SHIB_APPLICATION_ID': 'default', \
              u'HTTP_SHIB_AUTHNCONTEXT_CLASS': 'urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport', \
              u'HTTP_DISPLAYNAME': 'Russell Sim', \
              u'HTTP_SHIB_AUTHENTICATION_INSTANT': '2010-02-18T22:46:12.140Z'})
        """
        attributes={}

        for k in self.getPossibleAttributes():
            v = request.get(k, None)
            if v:
                attributes[k] = v

        uid_attr = self.userid_attribute
        if uid_attr.strip():
            if not (uid_attr in request.keys()):
                id = str(self.__getShibbolethSessionId(request))
                log.debug("User UID not supplied using handle: %s, from provider: %s." % (id, request[self.idp_attribute]))
            else:
                log.debug('id: %s, %s' % (uid_attr, request[uid_attr]))
                id = request[uid_attr]
        else:
            log.error("userid_attribute is not set to anything.")
            # No userid attribute is configured; fall back to the Shibboleth
            # session handle so that 'id' is always bound before returning.
            id = str(self.__getShibbolethSessionId(request))
        return id, attributes


    security.declarePrivate('__getShibbolethSessionId')
    def __getShibbolethSessionId(self, request):
        """
        Gets the Shibboleth Session ID from the Request

            >>> shib_headers = { 'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", }
            >>> self.app.acl_users.shib.REQUEST.environ.update(shib_headers)
            >>> self.shib._ShibbolethHelper__getShibbolethSessionId(self.shib.REQUEST)
            '_9c86b438e92e1de9b378a23f4838a959'
        """
        return request.get('HTTP_SHIB_SESSION_ID', None)


    security.declarePrivate('__validShibSession')
    def __validShibSession(self, request):
        """
        Check that the request shib session id matches the one in the session manager.

            >>> from zope.publisher.browser import TestRequest
            >>> shib_headers = { 'HTTP_SHIB_SESSION_ID' : "_9c86b438e92e1de9b378a23f4838a959", }
            >>> request = TestRequest(**shib_headers)
            >>> request.SESSION = self.app.session_data_manager.getSessionData()
            >>> self.shib._ShibbolethHelper__validShibSession(request)
            False

            >>> request.SESSION = self.app.session_data_manager.getSessionData()
            >>> request.SESSION['shibboleth.session'] = '_9c86b438e92e1de9b378a23f4838a959'
            >>> self.shib._ShibbolethHelper__validShibSession(request)
            True
        """

        sesid = self.__getShibbolethSessionId(request)
        if request.SESSION.has_key('shibboleth.session'):
            return request.SESSION['shibboleth.session'] == sesid
        return False



    security.declarePrivate('__getIPAddress')
    def __getIPAddress(self, request):
        """
        Get the IP Address

            >>> from ZPublisher.HTTPRequest import HTTPRequest
            >>> from StringIO import StringIO
            >>> request = HTTPRequest(StringIO(), {'REQUEST_METHOD': 'GET', \
              'SERVER_NAME': 'localhost', \
              'SERVER_PORT': '80', \
              'REMOTE_ADDR':'137.219.45.111', \
              'HTTP_X_FORWARDED_FOR': ''}, None)
            >>> self.shib._ShibbolethHelper__getIPAddress(request)
            '137.219.45.111'

            >>> from zope.publisher.browser import TestRequest
            >>> request = TestRequest(**{'HTTP_X_FORWARDED_FOR': '137.219.45.217'})
            >>> self.shib._ShibbolethHelper__getIPAddress(request)
            '137.219.45.217'
        """
        self.log(DEBUG, "__getIPAddress: %s" % request)
        # TODO probably should advise the user about untrusted proxies
        toRet = request['HTTP_X_FORWARDED_FOR']
        if not toRet:
            toRet = request.getClientAddr()
        self.log(DEBUG, "__getIPAddress: %s" % toRet)
        return toRet


    security.declarePrivate('log')
    def log(self, level, message):
        """
        Log a message for this object.
        """
        log.log(level, ": "+ str(message))


    security.declarePrivate('__caculateMapping')
    def __caculateMapping(self, principal, funcs):
        self.log(DEBUG, "__caculateMapping: %s, %s" % (principal, funcs))
        toRet = []
        attrs = self.store.get(principal, {})
        assign_targets = funcs
        for role in assign_targets:
            try:
                if assign_targets[role](attrs): toRet.append(role)
            except Exception, e:
                self.log(INFO,"Error occoured whilst assiging target: %s" % role)
        self.log(DEBUG, "__caculateMapping: %s" % toRet)
        return tuple(toRet)
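
#
#   Usage sketch (illustrative only; not part of the plugin)
#
# A standalone sketch, using plain dicts instead of the plugin's OOBTrees, of how
# ShibbolethHelper's attr_map turns stored Shibboleth headers into Plone-style
# user properties, including the HTTP_CN fallback used when the IdP supplies no
# display name.  The function name and sample values below are assumptions.
SKETCH_ATTR_MAP = {'HTTP_DISPLAYNAME': 'fullname', 'HTTP_MAIL': 'email'}

def sketch_properties(userdata, attr_map=SKETCH_ATTR_MAP):
    data = {}
    for header, prop in attr_map.items():
        if header in userdata:
            data[prop] = userdata[header]
        elif prop == 'fullname':
            # Same fallback as getPropertiesForUser(): HTTP_CN, then a placeholder.
            data[prop] = userdata.get('HTTP_CN', 'Unknown (Name not supplied)')
    return data

print(sketch_properties({'HTTP_CN': 'David B', 'HTTP_MAIL': 'david@example.org'}))
# -> fullname falls back to HTTP_CN ('David B'); email comes from HTTP_MAIL
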
Exemple #22
0
class CSFacebookUsers(BasePlugin):
    """PAS plugin for authentication against facebook.

    Here, we implement a number of PAS interfaces, using session_data_manager
    to temporarily store the values we have captured.
    """

    # List PAS interfaces we implement here
    implements(ICSFacebookPlugin, IExtractionPlugin, ICredentialsResetPlugin,
               IAuthenticationPlugin, IPropertiesPlugin,
               IUserEnumerationPlugin, IUserFactoryPlugin)

    def __init__(self, id, title=None):
        self.__name__ = self.id = id
        self.title = title
        self._storage = OOBTree()

    #
    # IExtractionPlugin
    #
    def extractCredentials(self, request):
        """This method is called on every request to extract credentials.
        In our case, that means looking for the values we store in the
        session.

        o Return a mapping of any derived credentials.

        o Return an empty mapping to indicate that the plugin found no
          appropriate credentials.
        """

        # Get the session from session_data_manager
        sdm = getToolByName(getSite(), "session_data_manager")
        session = sdm.getSessionData(create=False)

        if session is None:
            return None

        # We have been authenticated and we have a session that has not yet
        # expired:
        if SessionKeys.userId in session.keys():
            data = {
                'src': self.getId(),
                'userid': session[SessionKeys.userId],
                'username': session[SessionKeys.userName],
            }
            return data

        return None

    #
    # IAuthenticationPlugin
    #

    def authenticateCredentials(self, credentials):
        """This method is called with the credentials that have been
        extracted by extractCredentials(), to determine whether the user is
        authenticated or not.

        We basically trust our session data, so if the session contains a
        user, we treat them as authenticated. Other systems may have more
        stringent needs, but we should avoid an expensive check here, as this
        method may be called very often - at least once per request.

        credentials -> (userid, login)

        o 'credentials' will be a mapping, as returned by extractCredentials().

        o Return a  tuple consisting of user ID (which may be different
          from the login name) and login

        o If the credentials cannot be authenticated, return None.
        """

        # If we didn't extract, ignore
        if credentials.get('src', None) != self.getId():
            return

        # We have a session, which was identified by extractCredentials above.
        # Trust that it extracted the correct user id and login name
        if ('userid' in credentials and 'username' in credentials):
            return (
                credentials['userid'],
                credentials['username'],
            )

        return None

    #
    # ICredentialsResetPlugin
    #

    def resetCredentials(self, request, response):
        """This method is called if the user logs out.

        Here, we simply destroy their session.
        """
        sdm = getToolByName(getSite(), "session_data_manager")
        session = sdm.getSessionData(create=False)
        if session is None:
            return
        session.invalidate()

    #
    # IPropertiesPlugin
    #

    def getPropertiesForUser(self, user, request=None):
        """This method is called whenever Plone needs to get properties for
        a user. We return a dictionary with properties that map to those
        Plone expects.

         user -> {}

        o User will implement IPropertiedUser.

        o Plugin should return a dictionary or an object providing
          IPropertySheet.

        o Plugin may scribble on the user, if needed (but must still
          return a mapping, even if empty).

        o May assign properties based on values in the REQUEST object, if
          present
        """
        # If this is a Facebook User, it implements IFacebookUser
        if not IFacebookUser.providedBy(user):
            return {}

        else:
            user_data = self._storage.get(user.getId(), None)
            if user_data is None:
                return {}

            return user_data

    #
    # IUserEnumerationPlugin
    #
    def enumerateUsers(self,
                       id=None,
                       login=None,
                       exact_match=False,
                       sort_by=None,
                       max_results=None,
                       **kw):
        """This function is used to search for users.

        We don't implement a search of all of Facebook (!)

        """
        def match(data, criterias, exact_match=False):
            """Search for users with the given criterias"""
            for propertyname, searchstring in criterias.items():
                stored_value = data.get(propertyname, None)
                if stored_value is None:
                    return False

                if not isinstance(stored_value, unicode):
                    stored_value = stored_value.decode('utf-8')
                if not isinstance(searchstring, unicode):
                    searchstring = searchstring.decode('utf-8')

                if exact_match and searchstring != stored_value:
                    return False
                else:
                    if not searchstring:
                        return False

                    if searchstring.lower() in stored_value.lower():
                        return True
            return False

        if exact_match:
            if id is not None or login is not None:
                # No difference between id and login
                name = id or login
                data = self._storage.get(name, None)
                if data is not None:
                    return ({
                        'id': name,
                        'login': data.get('username'),
                        'title': data.get('username'),
                        'pluginid': self.getId()
                    }, )
                else:
                    return ()
        criterias = copy(kw)
        result = [(userid, data) for (userid, data) in self._storage.items()
                  if match(data, criterias, exact_match)]

        return tuple([{
            'id': user_id,
            'login': data.get('username'),
            'title': data.get('username'),
            'pluginid': self.getId()
        } for (user_id, data) in result])

    # IUserFactoryPlugin interface
    def createUser(self, user_id, name):
        # Only create a FacebookUser if this is a Facebook user id
        user_data = self._storage.get(user_id, None)
        if user_data is not None:
            return FacebookUser(user_id, name)

        return None
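
#
#   Usage sketch (illustrative only; not part of the plugin)
#
# A standalone sketch of the handshake between CSFacebookUsers.extractCredentials
# and authenticateCredentials: credentials are tagged with the plugin id ('src')
# and then trusted as-is, because the session itself is the source of truth.  The
# plain dict below stands in for session_data_manager, and the 'facebook.*' keys
# stand in for SessionKeys; both are assumptions made for this sketch.
def sketch_extract(session, plugin_id):
    if 'facebook.userId' in session:
        return {'src': plugin_id,
                'userid': session['facebook.userId'],
                'username': session['facebook.userName']}
    return None

def sketch_authenticate(credentials, plugin_id):
    if credentials is None or credentials.get('src') != plugin_id:
        return None   # not extracted by this plugin -> ignore
    if 'userid' in credentials and 'username' in credentials:
        return credentials['userid'], credentials['username']
    return None

creds = sketch_extract({'facebook.userId': '42', 'facebook.userName': 'someone'},
                       'facebook_users')
print(sketch_authenticate(creds, 'facebook_users'))   # ('42', 'someone')
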
class ZODBRoleManager( BasePlugin ):

    """ PAS plugin for managing roles in the ZODB.
    """
    meta_type = 'ZODB Role Manager'

    security = ClassSecurityInfo()

    def __init__(self, id, title=None):

        self._id = self.id = id
        self.title = title

        self._roles = OOBTree()
        self._principal_roles = OOBTree()

    def manage_afterAdd( self, item, container ):

        if item is self:
            role_holder = aq_parent( aq_inner( container ) )
            for role in getattr( role_holder, '__ac_roles__', () ):
                try:
                    if role not in ('Anonymous', 'Authenticated'):
                        self.addRole( role )
                except KeyError:
                    pass

        if 'Manager' not in self._roles:
            self.addRole( 'Manager' )

    #
    #   IRolesPlugin implementation
    #
    security.declarePrivate( 'getRolesForPrincipal' )
    def getRolesForPrincipal( self, principal, request=None ):

        """ See IRolesPlugin.
        """
        result = list( self._principal_roles.get( principal.getId(), () ) )

        getGroups = getattr( principal, 'getGroups', lambda x: () )
        for group_id in getGroups():
            result.extend( self._principal_roles.get( group_id, () ) )

        return tuple( result )

    #
    #   IRoleEnumerationPlugin implementation
    #
    def enumerateRoles( self
                      , id=None
                      , exact_match=False
                      , sort_by=None
                      , max_results=None
                      , **kw
                      ):

        """ See IRoleEnumerationPlugin.
        """
        role_info = []
        role_ids = []
        plugin_id = self.getId()

        if isinstance( id, str ):
            id = [ id ]

        if exact_match and ( id ):
            role_ids.extend( id )

        if role_ids:
            role_filter = None

        else:   # Searching
            role_ids = self.listRoleIds()
            role_filter = _ZODBRoleFilter( id, **kw )

        for role_id in role_ids:

            if self._roles.get( role_id ):
                e_url = '%s/manage_roles' % self.getId()
                p_qs = 'role_id=%s' % role_id
                m_qs = 'role_id=%s&assign=1' % role_id

                info = {}
                info.update( self._roles[ role_id ] )

                info[ 'pluginid' ] = plugin_id
                info[ 'properties_url'  ] = '%s?%s' % (e_url, p_qs)
                info[ 'members_url'  ] = '%s?%s' % (e_url, m_qs)

                if not role_filter or role_filter( info ):
                    role_info.append( info )

        return tuple( role_info )

    #
    #   IRoleAssignerPlugin implementation
    #
    security.declarePrivate( 'doAssignRoleToPrincipal' )
    def doAssignRoleToPrincipal( self, principal_id, role ):
        return self.assignRoleToPrincipal( role, principal_id )

    security.declarePrivate( 'doRemoveRoleFromPrincipal' )
    def doRemoveRoleFromPrincipal( self, principal_id, role ):
        return self.removeRoleFromPrincipal( role, principal_id )

    #
    #   Role management API
    #
    security.declareProtected( ManageUsers, 'listRoleIds' )
    def listRoleIds( self ):

        """ Return a list of the role IDs managed by this object.
        """
        return self._roles.keys()

    security.declareProtected( ManageUsers, 'listRoleInfo' )
    def listRoleInfo( self ):

        """ Return a list of the role mappings.
        """
        return self._roles.values()

    security.declareProtected( ManageUsers, 'getRoleInfo' )
    def getRoleInfo( self, role_id ):

        """ Return a role mapping.
        """
        return self._roles[ role_id ]

    security.declarePrivate( 'addRole' )
    def addRole( self, role_id, title='', description='' ):

        """ Add 'role_id' to the list of roles managed by this object.

        o Raise KeyError on duplicate.
        """
        if self._roles.get( role_id ) is not None:
            raise KeyError('Duplicate role: %s' % role_id)

        self._roles[ role_id ] = { 'id' : role_id
                                 , 'title' : title
                                 , 'description' : description
                                 }

    security.declarePrivate( 'updateRole' )
    def updateRole( self, role_id, title, description ):

        """ Update title and description for the role.

        o Raise KeyError if not found.
        """
        self._roles[ role_id ].update( { 'title' : title
                                       , 'description' : description
                                       } )

    security.declarePrivate( 'removeRole' )
    def removeRole( self, role_id, REQUEST=None ):

        """ Remove 'role_id' from the list of roles managed by this object.

        o Raise KeyError if not found.

        Note that if you really want to remove a role you should first
        remove it from the roles in the root of the site (at the
        bottom of the Security tab at manage_access).
        """
        for principal_id in self._principal_roles.keys():
            self.removeRoleFromPrincipal( role_id, principal_id )

        del self._roles[ role_id ]

    #
    #   Role assignment API
    #
    security.declareProtected( ManageUsers, 'listAvailablePrincipals' )
    def listAvailablePrincipals( self, role_id, search_id ):

        """ Return a list of principal IDs to whom a role can be assigned.

        o If supplied, 'search_id' constrains the principal IDs;  if not,
          return empty list.

        o Omit principals with existing assignments.
        """
        result = []

        if search_id:  # don't bother searching if no criteria

            parent = aq_parent( self )

            for info in parent.searchPrincipals( max_results=20
                                               , sort_by='id'
                                               , id=search_id
                                               , exact_match=False
                                               ):
                id = info[ 'id' ]
                title = info.get( 'title', id )
                if ( role_id not in self._principal_roles.get( id, () )
                 and role_id != id ):
                    result.append( ( id, title ) )

        return result

    security.declareProtected( ManageUsers, 'listAssignedPrincipals' )
    def listAssignedPrincipals( self, role_id ):

        """ Return a list of principal IDs to whom a role is assigned.
        """
        result = []

        for k, v in self._principal_roles.items():
            if role_id in v:
                # there should be at most one principal matching 'k'

                parent = aq_parent( self )
                info = parent.searchPrincipals( id=k, exact_match=True )

                if len(info) > 1:
                    message = ("Multiple groups or users exist with the "
                               "name '%s'. Remove one of the duplicate groups "
                               "or users." % (k))
                    LOG.error(message)
                    raise MultiplePrincipalError(message)

                if len( info ) == 0:
                    title = '<%s: not found>' % k
                else:
                    title = info[0].get( 'title', k )
                result.append( ( k, title ) )

        return result

    security.declarePrivate( 'assignRoleToPrincipal' )
    def assignRoleToPrincipal( self, role_id, principal_id ):

        """ Assign a role to a principal (user or group).

        o Return a boolean indicating whether a new assignment was created.

        o Raise KeyError if 'role_id' is unknown.
        """
        role_info = self._roles[ role_id ] # raise KeyError if unknown!

        current = self._principal_roles.get( principal_id, () )
        already = role_id in current

        if not already:
            new = current + ( role_id, )
            self._principal_roles[ principal_id ] = new
            self._invalidatePrincipalCache( principal_id )

        return not already

    security.declarePrivate( 'removeRoleFromPrincipal' )
    def removeRoleFromPrincipal( self, role_id, principal_id ):

        """ Remove a role from a principal (user or group).

        o Return a boolean indicating whether the role was already present.

        o Raise KeyError if 'role_id' is unknown.

        o Ignore requests to remove a role not already assigned to the
          principal.
        """
        role_info = self._roles[ role_id ] # raise KeyError if unknown!

        current = self._principal_roles.get( principal_id, () )
        new = tuple( [ x for x in current if x != role_id ] )
        already = current != new

        if already:
            self._principal_roles[ principal_id ] = new
            self._invalidatePrincipalCache( principal_id )

        return already

    #
    #   ZMI
    #
    manage_options = ( ( { 'label': 'Roles',
                           'action': 'manage_roles', }
                         ,
                       )
                     + BasePlugin.manage_options
                     )

    security.declareProtected( ManageUsers, 'manage_roles' )
    manage_roles = PageTemplateFile( 'www/zrRoles'
                                   , globals()
                                   , __name__='manage_roles'
                                   )

    security.declareProtected( ManageUsers, 'manage_twoLists' )
    manage_twoLists = PageTemplateFile( '../www/two_lists'
                                      , globals()
                                      , __name__='manage_twoLists'
                                      )

    security.declareProtected( ManageUsers, 'manage_addRole' )
    @csrf_only
    @postonly
    def manage_addRole( self
                      , role_id
                      , title
                      , description
                      , RESPONSE=None
                      , REQUEST=None
                      ):
        """ Add a role via the ZMI.
        """
        self.addRole( role_id, title, description )

        message = 'Role+added'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_roles?manage_tabs_message=%s'
                            % ( self.absolute_url(), message )
                            )

    security.declareProtected( ManageUsers, 'manage_updateRole' )
    @csrf_only
    @postonly
    def manage_updateRole( self
                         , role_id
                         , title
                         , description
                         , RESPONSE=None
                         , REQUEST=None
                         ):
        """ Update a role via the ZMI.
        """
        self.updateRole( role_id, title, description )

        message = 'Role+updated'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_roles?role_id=%s&'
                               'manage_tabs_message=%s'
                            % ( self.absolute_url(), role_id, message )
                            )

    security.declareProtected( ManageUsers, 'manage_removeRoles' )
    @csrf_only
    @postonly
    def manage_removeRoles( self
                          , role_ids
                          , RESPONSE=None
                          , REQUEST=None
                          ):
        """ Remove one or more role assignments via the ZMI.

        Note that if you really want to remove a role you should first
        remove it from the roles in the root of the site (at the
        bottom of the Security tab at manage_access).
        """
        role_ids = filter( None, role_ids )

        if not role_ids:
            message = 'no+roles+selected'

        else:

            for role_id in role_ids:
                self.removeRole( role_id )

            message = 'Role+assignments+removed'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_roles?manage_tabs_message=%s'
                            % ( self.absolute_url(), message )
                            )

    security.declareProtected( ManageUsers, 'manage_assignRoleToPrincipals' )
    @csrf_only
    @postonly
    def manage_assignRoleToPrincipals( self
                                     , role_id
                                     , principal_ids
                                     , RESPONSE
                                     , REQUEST=None
                                     ):
        """ Assign a role to one or more principals via the ZMI.
        """
        assigned = []

        for principal_id in principal_ids:
            if self.assignRoleToPrincipal( role_id, principal_id ):
                assigned.append( principal_id )

        if not assigned:
            message = 'Role+%s+already+assigned+to+all+principals' % role_id
        else:
            message = 'Role+%s+assigned+to+%s' % ( role_id
                                                 , '+'.join( assigned )
                                                 )

        if RESPONSE is not None:
            RESPONSE.redirect( ( '%s/manage_roles?role_id=%s&assign=1'
                            + '&manage_tabs_message=%s'
                            ) % ( self.absolute_url(), role_id, message )
                            )

    security.declareProtected( ManageUsers, 'manage_removeRoleFromPrincipals' )
    @csrf_only
    @postonly
    def manage_removeRoleFromPrincipals( self
                                       , role_id
                                       , principal_ids
                                       , RESPONSE=None
                                       , REQUEST=None
                                       ):
        """ Remove a role from one or more principals via the ZMI.
        """
        removed = []

        for principal_id in principal_ids:
            if self.removeRoleFromPrincipal( role_id, principal_id ):
                removed.append( principal_id )

        if not removed:
            message = 'Role+%s+already+removed+from+all+principals' % role_id
        else:
            message = 'Role+%s+removed+from+%s' % ( role_id
                                                  , '+'.join( removed )
                                                  )

        if RESPONSE is not None:
            RESPONSE.redirect( ( '%s/manage_roles?role_id=%s&assign=1'
                            + '&manage_tabs_message=%s'
                            ) % ( self.absolute_url(), role_id, message )
                            )
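
# Hedged, standalone sketch of the pattern the ZMI handlers above share: try
# the change for every selected principal, keep only the ids that actually
# changed, and build a '+'-separated status message ('+' is the URL-encoded
# space used in manage_tabs_message).  assign_role below is a stand-in that
# returns True only when the assignment is new; all names are illustrative.
def assign_role(role_id, principal_id, assignments):
    key = (role_id, principal_id)
    if key in assignments:
        return False
    assignments.add(key)
    return True

def manage_assign(role_id, principal_ids, assignments):
    assigned = [pid for pid in principal_ids
                if assign_role(role_id, pid, assignments)]
    if not assigned:
        return 'Role+%s+already+assigned+to+all+principals' % role_id
    return 'Role+%s+assigned+to+%s' % (role_id, '+'.join(assigned))

state = set()
print(manage_assign('Editor', ['alice', 'bob'], state))
# -> Role+Editor+assigned+to+alice+bob
print(manage_assign('Editor', ['alice'], state))
# -> Role+Editor+already+assigned+to+all+principals
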
class ZODBUserManager( BasePlugin, Cacheable ):

    """ PAS plugin for managing users in the ZODB.
    """

    meta_type = 'ZODB User Manager'

    security = ClassSecurityInfo()

    def __init__(self, id, title=None):

        self._id = self.id = id
        self.title = title

        self._user_passwords = OOBTree()
        self._login_to_userid = OOBTree()
        self._userid_to_login = OOBTree()

    #
    #   IAuthenticationPlugin implementation
    #
    security.declarePrivate( 'authenticateCredentials' )
    def authenticateCredentials( self, credentials ):

        """ See IAuthenticationPlugin.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        login = credentials.get( 'login' )
        password = credentials.get( 'password' )

        if login is None or password is None:
            return None

        # Do we have a link between login and userid?  Do NOT fall
        # back to using the login as userid when there is no match, as
        # that gives a high chance of seeming to log in successfully,
        # but in reality failing.
        userid = self._login_to_userid.get(login)
        if userid is None:
            # Someone may be logging in with a userid instead of a
            # login name and the two are not the same.  We could try
            # turning those around, but really we should just fail.
            #
            # userid = login
            # login = self._userid_to_login.get(userid)
            # if login is None:
            #     return None
            return None

        reference = self._user_passwords.get(userid)

        if reference is None:
            return None

        if AuthEncoding.is_encrypted( reference ):
            if AuthEncoding.pw_validate( reference, password ):
                return userid, login

        # Support previous naive behavior
        digested = sha( password ).hexdigest()

        if reference == digested:
            return userid, login

        return None

    #
    #   IUserEnumerationPlugin implementation
    #
    security.declarePrivate( 'enumerateUsers' )
    def enumerateUsers( self
                      , id=None
                      , login=None
                      , exact_match=False
                      , sort_by=None
                      , max_results=None
                      , **kw
                      ):

        """ See IUserEnumerationPlugin.
        """
        user_info = []
        user_ids = []
        plugin_id = self.getId()
        view_name = createViewName('enumerateUsers', id or login)


        if isinstance( id, basestring ):
            id = [ id ]

        if isinstance( login, basestring ):
            login = [ login ]

        # Look in the cache first...
        keywords = copy.deepcopy(kw)
        keywords.update( { 'id' : id
                         , 'login' : login
                         , 'exact_match' : exact_match
                         , 'sort_by' : sort_by
                         , 'max_results' : max_results
                         }
                       )
        cached_info = self.ZCacheable_get( view_name=view_name
                                         , keywords=keywords
                                         , default=None
                                         )
        if cached_info is not None:
            return tuple(cached_info)

        terms = id or login

        if exact_match:
            if terms:

                if id:
                    # if we're doing an exact match based on id, it
                    # absolutely will have been qualified (if we have a
                    # prefix), so we can ignore any that don't begin with
                    # our prefix
                    id = [ x for x in id if x.startswith(self.prefix) ]
                    user_ids.extend( [ x[len(self.prefix):] for x in id ] )
                elif login:
                    user_ids.extend( [ self._login_to_userid.get( x )
                                       for x in login ] )

                # we're claiming an exact match search, if we still don't
                # have anything, better bail.
                if not user_ids:
                    return ()
            else:
                # insane - exact match with neither login nor id
                return ()

        if user_ids:
            user_filter = None

        else:   # Searching
            user_ids = self.listUserIds()
            user_filter = _ZODBUserFilter( id, login, **kw )

        for user_id in user_ids:

            if self._userid_to_login.get( user_id ):
                e_url = '%s/manage_users' % self.getId()
                qs = 'user_id=%s' % user_id

                info = { 'id' : self.prefix + user_id
                       , 'login' : self._userid_to_login[ user_id ]
                       , 'pluginid' : plugin_id
                       , 'editurl' : '%s?%s' % (e_url, qs)
                       }

                if not user_filter or user_filter( info ):
                    user_info.append( info )

        # Put the computed value into the cache
        self.ZCacheable_set(user_info, view_name=view_name, keywords=keywords)

        return tuple( user_info )

    #
    #   IUserAdderPlugin implementation
    #
    security.declarePrivate( 'doAddUser' )
    def doAddUser( self, login, password ):
        try:
            self.addUser( login, login, password )
        except KeyError:
            return False
        return True

    #
    #   (notional)IZODBUserManager interface
    #
    security.declareProtected( ManageUsers, 'listUserIds' )
    def listUserIds( self ):

        """ -> ( user_id_1, ... user_id_n )
        """
        return self._user_passwords.keys()

    security.declareProtected( ManageUsers, 'getUserInfo' )
    def getUserInfo( self, user_id ):

        """ user_id -> {}
        """
        return { 'user_id' : user_id
               , 'login_name' : self._userid_to_login[ user_id ]
               , 'pluginid' : self.getId()
               }

    security.declareProtected( ManageUsers, 'listUserInfo' )
    def listUserInfo( self ):

        """ -> ( {}, ...{} )

        o Return one mapping per user, with the following keys:

          - 'user_id'
          - 'login_name'
        """
        return [ self.getUserInfo( x ) for x in self._user_passwords.keys() ]

    security.declareProtected( ManageUsers, 'getUserIdForLogin' )
    def getUserIdForLogin( self, login_name ):

        """ login_name -> user_id

        o Raise KeyError if no user exists for the login name.
        """
        return self._login_to_userid[ login_name ]

    security.declareProtected( ManageUsers, 'getLoginForUserId' )
    def getLoginForUserId( self, user_id ):

        """ user_id -> login_name

        o Raise KeyError if no user exists for that ID.
        """
        return self._userid_to_login[ user_id ]

    security.declarePrivate( 'addUser' )
    def addUser( self, user_id, login_name, password ):

        if self._user_passwords.get( user_id ) is not None:
            raise KeyError, 'Duplicate user ID: %s' % user_id

        if self._login_to_userid.get( login_name ) is not None:
            raise KeyError, 'Duplicate login name: %s' % login_name

        self._user_passwords[ user_id ] = self._pw_encrypt( password)
        self._login_to_userid[ login_name ] = user_id
        self._userid_to_login[ user_id ] = login_name

        # enumerateUsers return value has changed
        view_name = createViewName('enumerateUsers')
        self.ZCacheable_invalidate(view_name=view_name)

    security.declarePrivate('updateUser')
    def updateUser(self, user_id, login_name):

        # The following raises a KeyError if the user_id is invalid
        old_login = self.getLoginForUserId(user_id)

        if old_login != login_name:

            if self._login_to_userid.get(login_name) is not None:
                raise ValueError('Login name not available: %s' % login_name)

            del self._login_to_userid[old_login]
            self._login_to_userid[login_name] = user_id
            self._userid_to_login[user_id] = login_name
        # Signal success.
        return True

    security.declarePrivate('updateEveryLoginName')
    def updateEveryLoginName(self, quit_on_first_error=True):
        # Update all login names to their canonical value.  This
        # should be done after changing the login_transform property
        # of pas.  You can set quit_on_first_error to False to report
        # all errors before quitting with an error.  This can be
        # useful if you want to know how many problems there are, if
        # any.
        pas = self._getPAS()
        transform = pas._get_login_transform_method()
        if not transform:
            logger.warn("PAS has a non-existing, empty or wrong "
                        "login_transform property.")
            return

        # Make a fresh mapping, as we do not want to add or remove
        # items to the original mapping while we are iterating over
        # it.
        new_login_to_userid = OOBTree()
        errors = []
        for old_login_name, user_id in self._login_to_userid.items():
            new_login_name = transform(old_login_name)
            if new_login_name in new_login_to_userid:
                logger.error("User id %s: login name %r already taken.",
                             user_id, new_login_name)
                errors.append(new_login_name)
                if quit_on_first_error:
                    break
            new_login_to_userid[new_login_name] = user_id
            if new_login_name != old_login_name:
                self._userid_to_login[user_id] = new_login_name
                # Also, remove from the cache
                view_name = createViewName('enumerateUsers', user_id)
                self.ZCacheable_invalidate(view_name=view_name)
                logger.debug("User id %s: changed login name from %r to %r.",
                             user_id, old_login_name, new_login_name)

        # If there were errors, we do not want to save any changes.
        if errors:
            logger.error("There were %d errors when updating login names. "
                         "quit_on_first_error was %r", len(errors),
                         quit_on_first_error)
            # Make sure the exception we raise is not swallowed.
            self._dont_swallow_my_exceptions = True
            raise ValueError("Transformed login names are not unique: %s." %
                             ', '.join(errors))

        # Make sure we did not lose any users.
        assert(len(self._login_to_userid.keys())
               == len(new_login_to_userid.keys()))
        # Empty the main cache.
        view_name = createViewName('enumerateUsers')
        self.ZCacheable_invalidate(view_name=view_name)
        # Store the new login mapping.
        self._login_to_userid = new_login_to_userid

    security.declarePrivate( 'removeUser' )
    def removeUser( self, user_id ):

        if self._user_passwords.get( user_id ) is None:
            raise KeyError, 'Invalid user ID: %s' % user_id

        login_name = self._userid_to_login[ user_id ]

        del self._user_passwords[ user_id ]
        del self._login_to_userid[ login_name ]
        del self._userid_to_login[ user_id ]

        # Also, remove from the cache
        view_name = createViewName('enumerateUsers')
        self.ZCacheable_invalidate(view_name=view_name)
        view_name = createViewName('enumerateUsers', user_id)
        self.ZCacheable_invalidate(view_name=view_name)

    security.declarePrivate( 'updateUserPassword' )
    def updateUserPassword( self, user_id, password ):

        if self._user_passwords.get( user_id ) is None:
            raise KeyError, 'Invalid user ID: %s' % user_id

        if password:
            self._user_passwords[ user_id ] = self._pw_encrypt( password )

    security.declarePrivate( '_pw_encrypt' )
    def _pw_encrypt( self, password ):
        """Returns the AuthEncoding encrypted password

        If 'password' is already encrypted, it is returned
        as is and not encrypted again.
        """
        if AuthEncoding.is_encrypted( password ):
            return password
        return AuthEncoding.pw_encrypt( password )

    #
    #   ZMI
    #
    manage_options = ( ( { 'label': 'Users',
                           'action': 'manage_users', }
                         ,
                       )
                     + BasePlugin.manage_options
                     + Cacheable.manage_options
                     )

    security.declarePublic( 'manage_widgets' )
    manage_widgets = PageTemplateFile( 'www/zuWidgets'
                                     , globals()
                                     , __name__='manage_widgets'
                                     )

    security.declareProtected( ManageUsers, 'manage_users' )
    manage_users = PageTemplateFile( 'www/zuUsers'
                                   , globals()
                                   , __name__='manage_users'
                                   )

    security.declareProtected( ManageUsers, 'manage_addUser' )
    @csrf_only
    @postonly
    def manage_addUser( self
                      , user_id
                      , login_name
                      , password
                      , confirm
                      , RESPONSE=None
                      , REQUEST=None
                      ):
        """ Add a user via the ZMI.
        """
        if password != confirm:
            message = 'password+and+confirm+do+not+match'

        else:

            if not login_name:
                login_name = user_id

            # XXX:  validate 'user_id', 'login_name' against policies?

            self.addUser( user_id, login_name, password )

            message = 'User+added'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_users?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageUsers, 'manage_updateUserPassword' )
    @csrf_only
    @postonly
    def manage_updateUserPassword( self
                                 , user_id
                                 , password
                                 , confirm
                                 , RESPONSE=None
                                 , REQUEST=None
                                 ):
        """ Update a user's login name / password via the ZMI.
        """
        if password and password != confirm:
            message = 'password+and+confirm+do+not+match'

        else:

            self.updateUserPassword( user_id, password )

            message = 'password+updated'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_users?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageUsers, 'manage_updateUser' )
    @csrf_only
    @postonly
    def manage_updateUser(self
                         , user_id
                         , login_name
                         , RESPONSE=None
                         , REQUEST=None
                         ):
        """ Update a user's login name via the ZMI.
        """
        if not login_name:
            login_name = user_id

        # XXX:  validate 'user_id', 'login_name' against policies?

        self.updateUser(user_id, login_name)

        message = 'Login+name+updated'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_users?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageUsers, 'manage_removeUsers' )
    @csrf_only
    @postonly
    def manage_removeUsers( self
                          , user_ids
                          , RESPONSE=None
                          , REQUEST=None
                          ):
        """ Remove one or more users via the ZMI.
        """
        user_ids = filter( None, user_ids )

        if not user_ids:
            message = 'no+users+selected'

        else:

            for user_id in user_ids:
                self.removeUser( user_id )

            message = 'Users+removed'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_users?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    #
    #   Allow users to change their own login name and password.
    #
    security.declareProtected( SetOwnPassword, 'getOwnUserInfo' )
    def getOwnUserInfo( self ):

        """ Return current user's info.
        """
        user_id = getSecurityManager().getUser().getId()

        return self.getUserInfo( user_id )

    security.declareProtected( SetOwnPassword, 'manage_updatePasswordForm' )
    manage_updatePasswordForm = PageTemplateFile( 'www/zuPasswd'
                                   , globals()
                                   , __name__='manage_updatePasswordForm'
                                   )

    security.declareProtected( SetOwnPassword, 'manage_updatePassword' )
    @csrf_only
    @postonly
    def manage_updatePassword( self
                             , login_name
                             , password
                             , confirm
                             , RESPONSE=None
                             , REQUEST=None
                             ):
        """ Update the current user's password and login name.
        """
        user_id = getSecurityManager().getUser().getId()
        if password != confirm:
            message = 'password+and+confirm+do+not+match'

        else:

            if not login_name:
                login_name = user_id

            # XXX:  validate 'user_id', 'login_name' against policies?
            self.updateUser( user_id, login_name )
            self.updateUserPassword( user_id, password )

            message = 'password+updated'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_updatePasswordForm'
                               '?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )
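
# Hedged, standalone sketch of the lookup chain ZODBUserManager uses in
# authenticateCredentials: login -> userid, userid -> password hash, then a
# hash comparison.  Plain dicts and hashlib stand in for the OOBTrees and
# AuthEncoding; the demo_* names are illustrative, not part of the plugin.
import hashlib

demo_login_to_userid = {'jdoe@example.com': 'jdoe'}
demo_user_passwords = {'jdoe': hashlib.sha1(b'secret').hexdigest()}

def demo_authenticate(credentials):
    login = credentials.get('login')
    password = credentials.get('password')
    if login is None or password is None:
        return None
    # Do not fall back to treating the login as a userid -- mirror the plugin.
    userid = demo_login_to_userid.get(login)
    if userid is None:
        return None
    reference = demo_user_passwords.get(userid)
    if reference is None:
        return None
    if reference == hashlib.sha1(password.encode('utf-8')).hexdigest():
        return userid, login
    return None

print(demo_authenticate({'login': 'jdoe@example.com', 'password': 'secret'}))
# -> ('jdoe', 'jdoe@example.com')
print(demo_authenticate({'login': 'jdoe@example.com', 'password': 'wrong'}))
# -> None
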
class ClickmapTool(UniqueObject, SimpleItem):
    """ Tool that handles logging and viewing of clicks """

    implements(IClickmap)
    id = 'portal_clickmap' 

    ## internal attributes
    logger = None
    enabled = False
    pages = [] ## the list of uids, which gets logged
    output_width = 1024
    output_height = 768
    right_align_threshold = 0

    def isThisPageDoLogging(self, uid, context):
        """
        return True or False, to control the js functionality
        edit users always gets False, cause the layout differs heavily from the
        normal views.
        """
        if _checkPermission(ModifyPortalContent, context):
            return False

        if uid in self.pages and self.enabled:
            return True

        return False

    def getControlPanel(self):
        """ some html for the overlay control in the output of the image. """

        (yesterday, now) = self._getDefaultTimespan()
        default_start = yesterday.strftime('%Y/%m/%d %H:%M')
        default_end = now.strftime('%Y/%m/%d %H:%M')
        
        returner = []
        returner.append(u'<form id="clickmap_output_controller_form" style="float: left;">')
        returner.append(u'<h5>%s</h5>' %(_(u"Clickmap output controller")))
        returner.append(u'<input type="text" name="start" value="%s"> %s <br />' %(default_start, _(u"Start time")))
        returner.append(u'<input type="text" name="end" value="%s"> %s <br />' %(default_end, _(u"End time")))
        returner.append(u'<input type="submit" name="refresh" value="%s">' %(_(u"Refresh")))
        returner.append(u'</form>')
        returner.append(u'<br style="clear: both;" />')
        return '\n'.join(returner)

    def _getDefaultTimespan(self):
        """ return the inital timestop for the output controller: 1 day """
        now = DateTime()
        yesterday = now - 1
        return (yesterday, now)

    def drawImage(self, uid, start=None, end=None):
        """ read the requested data and generate the output image """

        if not start or not end:
            start, end = self._getDefaultTimespan()
        else:
            try:
                start = DateTime(start)
                end = DateTime(end)
            except:
                start, end = self._getDefaultTimespan()

        ## transform the DateTime objects to integer seconds
        start = int(start)
        end = int(end)
        
        coords = self._getCoordsByUid(uid,
                                      start=start,
                                      end=end)

        imagepath = drawer.do(self.output_width,
                              self.output_height,
                              coords)
        
        r = self.REQUEST.RESPONSE
        r.setHeader('content-type', 'image/png')
        i = open(imagepath, 'rb')
        b = i.read()
        i.close()

        r.write(b)

    def _getCoordsByUid(self, uid, start, end):
        """ """
        uid_log = self.logger.get(uid)
        if uid_log is not None:
            return self._transformLogToCoordsToDraw(uid_log, start, end)

        return [] ## fallback

    def _transformLogToCoordsToDraw(self, log_data, start, end):
        """ """
        result = {}
        for timestamp, coord in log_data.items(start,
                                               end):

            x, y = coord[0], coord[1]
            key = (x, y)
            # count how many clicks landed on each coordinate
            result[key] = result.get(key, 0) + 1

        return ((x, y, count) for (x, y), count in result.items())
    
    def initLogger(self, force=False):
        """
        a wrapper for _initLogger, which checks some conditions.
        @param force: boolean, to force reset the logger
        @returns: redirects to the controlpanel form
        """
        if self.logger is not None and force:
            self.logger = None

        if self.logger is None:
            self._initLogger()
            message = _(u"Clickmap logger initialized.")
        else:
            message = _(u"Clickmap logger already initialized. "
                        u"Use force=True to reset it.")

        IStatusMessage(self.REQUEST).addStatusMessage(message, type='info')
        self.REQUEST.RESPONSE.redirect('%s/@@clickmap-controlpanel'
                                       % (getToolByName(self, 'portal_url')()))

    def _initLogger(self):
        """ reset the logger attribute """
        self.logger = OOBTree()
        
    def _get_uid_log(self, uid):
        """
        return the IOBTree that holds the data of the given uid. If there is no
        one for the requested uid, create it.
        """
        if self.logger is None:
            self._initLogger()
            
        uid_log = self.logger.get(uid)
        if uid_log is None:
            uid_log = IOBTree()
            self.logger[uid] = uid_log
        return uid_log

    def _remove_uid_from_log(self, uid):
        """
        remove a logged page from the logger
        """
        if self.logger.get(uid) is not None:
            del self.logger[uid]
            
    def _store_request(self, uid, log_data):
        """
        store the request.
        @param uid: the uid of the object to log
        @param log_data: the IIBTree object to store
        """
        uid_log = self._get_uid_log(uid)
        uid_log[int(time())] = log_data

    def clicker(self, uid, x, y, w):
        """
        the parameters are stored transformed to the reference layout
        """
        if w > self.output_width:
            return "" ## sorry, we don't support this...

        if x > self.right_align_threshold:
            x += (self.output_width - w)
            
        log_data = IIBTree([(0, x),
                            (1, y)])
        self._store_request(uid,
                            log_data)
        return "ok"

    def debug_info(self):
        """
        transform the logged data to readable data for debugging...
        """
        result = {}
        for uid, log in self.logger.items():
            log_data = []
            for timestamp, data in log.items():
                log_data.append("%s: x -> %s | y -> %s" %(timestamp, data[0], data[1]))
            result[uid] = log_data

        import pprint
        pprint.pprint(result)
        return "see console output...."
Exemple #26
0
class Motion(Content, ContextACLMixin, LocalRolesMixin):
    default_view = "view"
    search_visible = True
    nav_visible = False
    type_name = "Motion"
    type_title = _("Motion")
    add_permission = ADD_MOTION
    css_icon = "glyphicon glyphicon-list-alt"
    body = ""
    endorsements_text = ""
    sharing_token = None
    hashtag = ""
    _proposals = ()
    _creator = ()
    _endorsements = {}

    @property
    def __acl__(self):
        acl_list = super(Motion, self).__acl__
        motion_proc = self.__parent__
        if motion_proc:
            if motion_proc.allow_endorsements == False:
                acl_list.insert(0, (Deny, ROLE_EVERYONE, (ENDORSE_MOTION,)))
            if motion_proc.allow_sharing_link == True:
                acl_list.insert(0, (Allow, ROLE_OWNER, (ENABLE_MOTION_SHARING,)))
            wf = self.workflow
            state = ""
            if wf:
                state = wf.state in wf.states and wf.state or wf.initial_state
            if state and state != "draft":
                if motion_proc.motion_visibility == "authenticated":
                    acl_list.insert(0, (Allow, ROLE_AUTHENTICATED, (PERM_VIEW,)))
                if motion_proc.motion_visibility == "everyone":
                    acl_list.insert(0, (Allow, ROLE_EVERYONE, (PERM_VIEW,)))
        return acl_list

    @property
    def proposals(self):
        return tuple(self._proposals)

    @proposals.setter
    def proposals(self, value):
        if not isinstance(self._proposals, PersistentList):
            self._proposals = PersistentList()
        if tuple(value) != tuple(self._proposals):
            self._proposals[:] = []
            self._proposals.extend(value)

    @property
    def creator(self):
        return tuple(self._creator)

    @creator.setter
    def creator(self, value):
        if tuple(value) != self._creator:
            self._creator = tuple(value)

    @property
    def endorsements(self):
        return tuple(self._endorsements)

    @endorsements.setter
    def endorsements(self, value):
        if not isinstance(self._endorsements, OOBTree):
            self._endorsements = OOBTree()
        # Add new with timestamp
        for userid in set(value) - set(self._endorsements):
            self.local_roles.add(userid, ROLE_VIEWER)
            self._endorsements[userid] = utcnow()
        # Remove no longer endorsing userids
        for userid in set(self._endorsements) - set(value):
            del self._endorsements[userid]
            self.local_roles.remove(userid, ROLE_VIEWER)

    @property
    def endorsements_info(self):
        return self._endorsements.items()

    def enable_sharing_token(self):
        self.sharing_token = "".join(
            [choice(string.letters + string.digits) for x in range(15)]
        )
        return self.sharing_token

    def remove_sharing_token(self):
        self.sharing_token = None
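
# Hedged sketch of the endorsements setter above: the new value is diffed
# against the stored mapping, newcomers get a timestamp (and, in the real
# class, the local Viewer role), and userids no longer present are dropped.
# Plain dicts and datetime stand in for the OOBTree and utcnow(); all names
# are illustrative.
from datetime import datetime

def set_endorsements(stored, new_userids):
    now = datetime.utcnow()
    for userid in set(new_userids) - set(stored):
        stored[userid] = now          # the real class also grants ROLE_VIEWER here
    for userid in set(stored) - set(new_userids):
        del stored[userid]            # the real class also revokes ROLE_VIEWER here
    return stored

endorsements = {'anna': datetime(2024, 1, 1)}
print(sorted(set_endorsements(endorsements, ['anna', 'bert'])))
# -> ['anna', 'bert']   (anna keeps her original timestamp, bert gets a new one)
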
Exemple #27
0
class Evaluations(persistent.Persistent, container.contained.Contained):
    """Evaluations mapping.

    This particular implementation uses the ``zope.app.keyreference`` package
    to generate the keys of the requirements. Any key that is passed in could
    be the requirement or the ``IKeyReference`` of the requirement. This
    implementation will always convert the key to provide ``IKeyReference``
    before treating it as a true key.

    Another feature of this implementation is that if you set an evaluation
    for a requirement that has already an evaluation, then the old evaluation
    is simply overridden. The ``IContainer`` interface would raise a duplicate
    name error.
    """
    zope.interface.implements(interfaces.IEvaluations)

    def __init__(self, items=None):
        super(Evaluations, self).__init__()
        self._btree = OOBTree()
        for name, value in items or []:
            self[name] = value

    def __getitem__(self, key):
        """See zope.interface.common.mapping.IItemMapping"""
        return self._btree[IKeyReference(key)]

    def __delitem__(self, key):
        """See zope.interface.common.mapping.IWriteMapping"""
        value = self[key]
        del self._btree[IKeyReference(key)]
        event = container.contained.ObjectRemovedEvent(value, self)
        zope.event.notify(event)

    def __setitem__(self, key, value):
        """See zope.interface.common.mapping.IWriteMapping"""
        self._btree[IKeyReference(key)] = value
        value, event = container.contained.containedEvent(value, self)
        zope.event.notify(event)

    def get(self, key, default=None):
        """See zope.interface.common.mapping.IReadMapping"""
        try:
            return self[key]
        except KeyError:
            return default

    def __contains__(self, key):
        """See zope.interface.common.mapping.IReadMapping"""
        return IKeyReference(key) in self._btree

    def keys(self):
        """See zope.interface.common.mapping.IEnumerableMapping"""
        # Dereference the key references and return the requirement objects
        # themselves, which is more natural, even though they are not the
        # true keys stored in the BTree.
        return [key() for key in self._btree.keys()]

    def __iter__(self):
        """See zope.interface.common.mapping.IEnumerableMapping"""
        return iter(self.keys())

    def values(self):
        """See zope.interface.common.mapping.IEnumerableMapping"""
        return self._btree.values()

    def items(self):
        """See zope.interface.common.mapping.IEnumerableMapping"""
        return [(key(), value) for key, value in self._btree.items()]

    def __len__(self):
        """See zope.interface.common.mapping.IEnumerableMapping"""
        return len(self._btree)

    def addEvaluation(self, evaluation):
        """See interfaces.IEvaluations"""
        self[evaluation.requirement] = evaluation

    def getEvaluationsForRequirement(self, req, recurse=True):
        """See interfaces.IEvaluations"""
        requirements = getRequirementList(req, recurse)
        result = [(name, ev) for name, ev in self.items()
                  if ev.requirement in requirements]
        result = Evaluations(result)
        location.locate(result, getParent(self), getName(self))
        return result

    def getEvaluationsOfEvaluator(self, evaluator):
        """See interfaces.IEvaluations"""
        result = [(name, ev) for name, ev in self.items()
                  if ev.evaluator == evaluator]
        result = Evaluations(result)
        location.locate(result, getParent(self), getName(self))
        return result

    def __repr__(self):
        try:
            parent = getParent(self)
        except TypeError:
            parent = None
        return '<%s for %r>' % (self.__class__.__name__, parent)
Exemple #28
0
class Index:

    def __init__(self, table, col_idx, idx_type, transform_criteria=None):
        self.index = None
        self.type = idx_type
        self.table = table
        self.transform_criteria = transform_criteria
        # table.indexes[col_idx]=idx_type

        if idx_type == "Hash":
            self.create_hash_index(col_idx)
        elif idx_type == "Hash_Transform":
            self.create_transform_hash_index(col_idx)
        else:
            self.index = OOBTree()
            for i, row in enumerate(self.table.rows):
                k = row[col_idx]
                if k in self.index:
                    self.index[k].append((i, col_idx))
                else:
                    self.index[k] = [(i, col_idx)]

    def create_hash_index(self, col_idx):
        self.index = {}
        if self.table.is_col_numeric(col_idx):
            for i, row in enumerate(self.table.rows):
                k = float(row[col_idx])
                if k in self.index.keys():
                    self.index[k].append((i, col_idx))
                else:
                    self.index[k] = [(i, col_idx)]
    
        else:  # indexed column is not numeric; keys should be strings
            for i, row in enumerate(self.table.rows):
                k = row[col_idx]
                if k in self.index.keys():
                    self.index[k].append((i, col_idx))
                else:
                    self.index[k] = [(i, col_idx)]

    def create_transform_hash_index(self, col_idx):
        # assuming transformed columns are numeric
        self.index = {}
        arithop = self.transform_criteria[0]
        constant = self.transform_criteria[1]
        for i, row in enumerate(self.table.rows):
            key = row[col_idx]
            transformed_key = arithop(float(key), float(constant))
            if transformed_key in self.index.keys():
                self.index[transformed_key].append((i, col_idx))
            else:
                self.index[transformed_key] = [(i, col_idx)]

    def get_pos(self, key):
        if key in self.index.keys():
            return self.index[key]
        else:
            return None

    def print(self, f=None):
        for k, v in self.index.items():
            print("%-10s -> %s" % (k, v), file=f)
class PendingList(object):
    """ Implementation of IPendingList

    Set up the pending list
    >>> from Products.listen.content import PendingList
    >>> plist = PendingList()
    
    Add a few pending members
    >>> plist.add('tom')
    >>> plist.add('*****@*****.**')
    >>> plist.add('mikey', time='2006-05-09', pin='4532123')
    >>> sorted(plist.get_user_emails())
    ['*****@*****.**', 'mikey', 'tom']

    The time that we set on mikey should be used instead of the default time
    >>> plist.get_pending_time('mikey')
    '2006-05-09'
    >>> plist.get_user_pin('mikey')
    '4532123'

    Try and add mikey a second time and make sure data is not lost but time is updated
    >>> plist.add('mikey')
    >>> plist.get_user_pin('mikey')
    '4532123'
    >>> plist.get_pending_time('mikey') != '2006-05-09'
    True

    Now let's remove them
    >>> plist.remove('tom')
    >>> plist.remove('*****@*****.**')
    >>> plist.remove('mikey')
    >>> plist.get_user_emails()
    []

    Let's create an item with a post
    >>> plist.add('timmy', post='a new post')
    >>> post = plist.get_posts('timmy')[0]
    >>> post['header']
    {}
    >>> post['body']
    'a new post'

    Verify the id of the post
    >>> post['postid']
    0
    
    Let's add a new post, and verify its id too
    >>> plist.add('timmy', post='hi there')
    >>> newpost = plist.get_posts('timmy')[1]
    >>> newpost['postid']
    1

    Remove the first one
    >>> plist.pop_post('timmy', 0) is not None
    True
    >>> p = plist.get_posts('timmy')[0]
    >>> p['body']
    'hi there'
    >>> p['postid']
    1

    Trying to pop a fake post returns None
    >>> plist.pop_post('timmy', 0) is None
    True
    >>> plist.pop_post('timmy', 17) is None
    True

    """
    implements(IPendingList)

    def __init__(self):
        self.pend = OOBTree()
        self.trust_caller = False

    def add(self, item, **values):
        self.pend.setdefault(item, OOBTree())
        if 'time' not in values:
            if self.trust_caller:
                raise AssertionError("No time passed in: %s" % values)
            values['time'] = DateTime().ISO()
        if 'post' in values:
            post_list = self.pend[item].setdefault('post', IOBTree())
            new_post = values['post']
            if isinstance(new_post, basestring):
                new_post = dict(header={}, body=new_post)

            try:
                nextid = post_list.maxKey() + 1
            except ValueError:
                nextid = 0

            if self.trust_caller:
                assert 'postid' in new_post, new_post
            else:
                new_post['postid'] = nextid
            post_list[new_post['postid']] = new_post
            values.pop('post')
        self.pend[item].update(values)

    def remove(self, item):
        if item in self.pend:
            self.pend.pop(item)

    def pop_post(self, item, postid):
        posts = self.pend[item]['post']
        try:
            return posts.pop(postid)
        except KeyError:
            return None

    def get_posts(self, user_email):
        return list(self.pend.get(user_email, {}).get('post', {}).values())

    def is_pending(self, item):
        return item in self.pend

    def get_user_pin(self, user_email):
        return self.pend.get(user_email, {}).get('pin')

    def get_pending_time(self, user_email):
        return self.pend.get(user_email, {}).get('time')

    def get_user_emails(self):
        return list(self.pend.keys())

    def get_user_name(self, user_email):
        return self.pend.get(user_email, {}).get('user_name')

    def clear(self):
        for email, item in self.pend.items():
            if 'post' in item:
                for post in item['post'].values():
                    for k, v in item.items():
                        if k == 'post': continue
                        post[k] = v
                    post['email'] = email
                    yield post
            else:
                item['email'] = email
                yield item
        self.pend.clear()
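
# Hedged sketch of the storage pattern PendingList.add() uses: one mapping of
# pending item -> metadata, with posts in a nested mapping whose next id is
# one past the current maximum key (maxKey() on the IOBTree above).  Plain
# dicts replace the BTrees; names are illustrative.
pend = {}

def add_pending(item, post=None, **values):
    entry = pend.setdefault(item, {})
    if post is not None:
        posts = entry.setdefault('post', {})
        next_id = max(posts) + 1 if posts else 0
        posts[next_id] = {'header': {}, 'body': post, 'postid': next_id}
    entry.update(values)

add_pending('timmy', post='a new post', pin='4532123')
add_pending('timmy', post='hi there')
print(sorted(pend['timmy']['post']))   # -> [0, 1]
print(pend['timmy']['pin'])            # -> 4532123
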
Exemple #30
0
class UnIndex(SimpleItem):
    """Simple forward and reverse index.
    """
    implements(ILimitedResultIndex, IUniqueValueIndex, ISortIndex)

    def __init__(self,
                 id,
                 ignore_ex=None,
                 call_methods=None,
                 extra=None,
                 caller=None):
        """Create an unindex

        UnIndexes are indexes that contain two index components, the
        forward index (like plain index objects) and an inverted
        index.  The inverted index is so that objects can be unindexed
        even when the old value of the object is not known.

        e.g.

        self._index = {datum:[documentId1, documentId2]}
        self._unindex = {documentId:datum}

        The arguments are:

          'id' -- the name of the item attribute to index.  This is
          either an attribute name or a record key.

          'ignore_ex' -- should be set to true if you want the index
          to ignore exceptions raised while indexing instead of
          propagating them.

          'call_methods' -- should be set to true if you want the index
          to call the attribute 'id' (note: 'id' should be callable!)
          You will also need to pass in an object in the index and
          unindex methods for this to work.

          'extra' -- a mapping object that keeps additional
          index-related parameters - subitem 'indexed_attrs'
          can be string with comma separated attribute names or
          a list

          'caller' -- reference to the calling object (usually
          a (Z)Catalog instance
        """
        def _get(o, k, default):
            """ return a value for a given key of a dict/record 'o' """
            if isinstance(o, dict):
                return o.get(k, default)
            else:
                return getattr(o, k, default)

        self.id = id
        self.ignore_ex = ignore_ex  # currently unimplemented
        self.call_methods = call_methods

        self.operators = ('or', 'and')
        self.useOperator = 'or'

        # allow index to index multiple attributes
        ia = _get(extra, 'indexed_attrs', id)
        if isinstance(ia, str):
            self.indexed_attrs = ia.split(',')
        else:
            self.indexed_attrs = list(ia)
        self.indexed_attrs = [
            attr.strip() for attr in self.indexed_attrs if attr
        ]
        if not self.indexed_attrs:
            self.indexed_attrs = [id]

        self.clear()

    def __len__(self):
        return self._length()

    def getId(self):
        return self.id

    def clear(self):
        self._length = Length()
        self._index = OOBTree()
        self._unindex = IOBTree()

    def __nonzero__(self):
        return not not self._unindex

    def histogram(self):
        """Return a mapping which provides a histogram of the number of
        elements found at each point in the index.
        """
        histogram = {}
        for key, value in self._index.items():
            if isinstance(value, int):
                entry = 1  # a bare int is a row holding a single document
            else:
                entry = len(value)
            histogram[entry] = histogram.get(entry, 0) + 1

        return histogram

    def referencedObjects(self):
        """Generate a list of IDs for which we have referenced objects."""
        return self._unindex.keys()

    def getEntryForObject(self, documentId, default=_marker):
        """Takes a document ID and returns all the information we have
        on that specific object.
        """
        if default is _marker:
            return self._unindex.get(documentId)
        else:
            return self._unindex.get(documentId, default)

    def removeForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and remove any reference to documentId
        in its entry in the index.
        """
        indexRow = self._index.get(entry, _marker)
        if indexRow is not _marker:
            try:
                indexRow.remove(documentId)
                if not indexRow:
                    del self._index[entry]
                    self._length.change(-1)

            except ConflictError:
                raise

            except AttributeError:
                # index row is an int
                try:
                    del self._index[entry]
                except KeyError:
                    # XXX swallow KeyError because it was probably
                    # removed and then _length AttributeError raised
                    pass
                if isinstance(self.__len__, Length):
                    self._length = self.__len__
                    del self.__len__
                self._length.change(-1)

            except:
                LOG.error(
                    '%s: unindex_object could not remove '
                    'documentId %s from index %s.  This '
                    'should not happen.' %
                    (self.__class__.__name__, str(documentId), str(self.id)),
                    exc_info=sys.exc_info())
        else:
            LOG.error('%s: unindex_object tried to retrieve set %s '
                      'from index %s but couldn\'t.  This '
                      'should not happen.' %
                      (self.__class__.__name__, repr(entry), str(self.id)))

    def insertForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and put it in the correct place
        in the forward index.

        This will also deal with creating the entire row if necessary.
        """
        indexRow = self._index.get(entry, _marker)

        # Make sure there's actually a row there already. If not, create
        # a set and stuff it in first.
        if indexRow is _marker:
            # We always use a set to avoid getting conflict errors on
            # multiple threads adding a new row at the same time
            self._index[entry] = IITreeSet((documentId, ))
            self._length.change(1)
        else:
            try:
                indexRow.insert(documentId)
            except AttributeError:
                # Inline migration: index row with one element was an int at
                # first (before Zope 2.13).
                indexRow = IITreeSet((indexRow, documentId))
                self._index[entry] = indexRow

    def index_object(self, documentId, obj, threshold=None):
        """ wrapper to handle indexing of multiple attributes """

        fields = self.getIndexSourceNames()

        res = 0
        for attr in fields:
            res += self._index_object(documentId, obj, threshold, attr)

        return res > 0

    def _index_object(self, documentId, obj, threshold=None, attr=''):
        """ index and object 'obj' with integer id 'documentId'"""
        returnStatus = 0

        # First we need to see if there's anything interesting to look at
        datum = self._get_object_datum(obj, attr)

        # We don't want to do anything that we don't have to here, so we'll
        # check to see if the new and existing information is the same.
        oldDatum = self._unindex.get(documentId, _marker)
        if datum != oldDatum:
            if oldDatum is not _marker:
                self.removeForwardIndexEntry(oldDatum, documentId)
                if datum is _marker:
                    try:
                        del self._unindex[documentId]
                    except ConflictError:
                        raise
                    except:
                        LOG.error(
                            'Should not happen: oldDatum was there, now it is '
                            'not, for document with id %s' % documentId)

            if datum is not _marker:
                self.insertForwardIndexEntry(datum, documentId)
                self._unindex[documentId] = datum

            returnStatus = 1

        return returnStatus

    def _get_object_datum(self, obj, attr):
        # self.id is the name of the index, which is also the name of the
        # attribute we're interested in.  If the attribute is callable,
        # we'll do so.
        try:
            datum = getattr(obj, attr)
            if safe_callable(datum):
                datum = datum()
        except (AttributeError, TypeError):
            datum = _marker
        return datum

    def numObjects(self):
        """Return the number of indexed objects."""
        return len(self._unindex)

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return len(self)

    def unindex_object(self, documentId):
        """ Unindex the object with integer id 'documentId' and don't
        raise an exception if we fail
        """
        unindexRecord = self._unindex.get(documentId, _marker)
        if unindexRecord is _marker:
            return None

        self.removeForwardIndexEntry(unindexRecord, documentId)

        try:
            del self._unindex[documentId]
        except ConflictError:
            raise
        except:
            LOG.debug('Attempt to unindex nonexistent document'
                      ' with id %s' % documentId,
                      exc_info=True)

    def _apply_index(self, request, resultset=None):
        """Apply the index to query parameters given in the request arg.

        The request argument should be a mapping object.

        If the request does not have a key which matches the "id" of
        the index instance, then None is returned.

        If the request *does* have a key which matches the "id" of
        the index instance, one of a few things can happen:

          - if the value is a blank string, None is returned (in
            order to support requests from web forms where
            you can't tell a blank string from empty).

          - if the value is a nonblank string, turn the value into
            a single-element sequence, and proceed.

          - if the value is a sequence, return a union search.

          - If the value is a dict and contains a key of the form
            '<index>_operator' this overrides the default method
            ('or') to combine search results. Valid values are "or"
            and "and".

        If None is not returned as a result of the abovementioned
        constraints, two objects are returned.  The first object is a
        ResultSet containing the record numbers of the matching
        records.  The second object is a tuple containing the names of
        all data fields used.

        FAQ answer:  to search a Field Index for documents that
        have a blank string as their value, wrap the request value
        up in a tuple ala: request = {'id':('',)}
        """
        record = parseIndexRequest(request, self.id, self.query_options)
        if record.keys is None:
            return None

        index = self._index
        r = None
        opr = None

        # experimental code for specifying the operator
        operator = record.get('operator', self.useOperator)
        if operator not in self.operators:
            raise RuntimeError("operator not valid: %s" % escape(operator))

        # Range parameter
        range_parm = record.get('range', None)
        if range_parm:
            opr = "range"
            opr_args = []
            if range_parm.find("min") > -1:
                opr_args.append("min")
            if range_parm.find("max") > -1:
                opr_args.append("max")

        if record.get('usage', None):
            # see if any usage params are sent to field
            opr = record.usage.lower().split(':')
            opr, opr_args = opr[0], opr[1:]

        if opr == "range":  # range search
            lo = min(record.keys) if 'min' in opr_args else None
            hi = max(record.keys) if 'max' in opr_args else None
            if hi:
                setlist = index.values(lo, hi)
            else:
                setlist = index.values(lo)

            # If we only use one key, intersect and return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result, ))
                return result, (self.id, )

            if operator == 'or':
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s, ))
                    tmp.append(s)
                r = multiunion(tmp)
            else:
                # For intersection, sort with smallest data set first
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s, ))
                    tmp.append(s)
                if len(tmp) > 2:
                    setlist = sorted(tmp, key=len)
                else:
                    setlist = tmp
                r = resultset
                for s in setlist:
                    # the result is bound by the resultset
                    r = intersection(r, s)

        else:  # not a range search
            # Filter duplicates
            setlist = []
            for k in record.keys:
                s = index.get(k, None)
                # If None, try to bail early
                if s is None:
                    if operator == 'or':
                        # If union, we can't possibly get a bigger result
                        continue
                    # If intersection, we can't possibly get a smaller result
                    return IISet(), (self.id, )
                elif isinstance(s, int):
                    s = IISet((s, ))
                setlist.append(s)

            # If we only use one key return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result, ))
                return result, (self.id, )

            if operator == 'or':
                # If we already get a small result set passed in, intersecting
                # the various indexes with it and doing the union later is
                # faster than creating a multiunion first.
                if resultset is not None and len(resultset) < 200:
                    smalllist = []
                    for s in setlist:
                        smalllist.append(intersection(resultset, s))
                    r = multiunion(smalllist)
                else:
                    r = multiunion(setlist)
            else:
                # For intersection, sort with smallest data set first
                if len(setlist) > 2:
                    setlist = sorted(setlist, key=len)
                r = resultset
                for s in setlist:
                    r = intersection(r, s)

        if isinstance(r, int):
            r = IISet((r, ))
        if r is None:
            return IISet(), (self.id, )
        else:
            return r, (self.id, )

    def hasUniqueValuesFor(self, name):
        """has unique values for column name"""
        if name == self.id:
            return 1
        else:
            return 0

    def getIndexSourceNames(self):
        """ return sequence of indexed attributes """
        # BBB:  older indexes didn't have 'indexed_attrs'
        return getattr(self, 'indexed_attrs', [self.id])

    def uniqueValues(self, name=None, withLengths=0):
        """returns the unique values for name

        if withLengths is true, returns a sequence of
        tuples of (value, length)
        """
        if name is None:
            name = self.id
        elif name != self.id:
            return []

        if not withLengths:
            return tuple(self._index.keys())
        else:
            rl = []
            for i in self._index.keys():
                set = self._index[i]
                if isinstance(set, int):
                    l = 1
                else:
                    l = len(set)
                rl.append((i, l))
            return tuple(rl)

    def keyForDocument(self, id):
        # This method is superseded by documentToKeyMap
        return self._unindex[id]

    def documentToKeyMap(self):
        return self._unindex

    def items(self):
        items = []
        for k, v in self._index.items():
            if isinstance(v, int):
                v = IISet((v, ))
            items.append((k, v))
        return items
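
# Hedged, standalone sketch of the bookkeeping that UnIndex describes in its
# __init__ docstring: a forward index mapping datum -> set of document ids
# and an inverted index mapping document id -> datum, so a document can be
# unindexed even when its old value is unknown.  Plain dicts and sets stand
# in for OOBTree/IOBTree/IITreeSet; names are illustrative.
forward = {}   # datum -> set(document ids)
reverse = {}   # document id -> datum

def index_doc(doc_id, datum):
    old = reverse.get(doc_id)
    if old == datum:
        return
    if old is not None:
        unindex_doc(doc_id)
    forward.setdefault(datum, set()).add(doc_id)
    reverse[doc_id] = datum

def unindex_doc(doc_id):
    datum = reverse.pop(doc_id, None)
    if datum is None:
        return
    row = forward.get(datum)
    if row is not None:
        row.discard(doc_id)
        if not row:
            del forward[datum]

index_doc(1, 'blue')
index_doc(2, 'blue')
index_doc(1, 'green')            # re-index: the old 'blue' entry is removed first
print(sorted(forward['blue']))   # -> [2]
print(sorted(forward['green']))  # -> [1]
unindex_doc(2)
print('blue' in forward)         # -> False
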
class ToManyContRelationship(ToManyRelationshipBase):
    """
    ToManyContRelationship is the ToMany side of a relationship that
    contains its related objects (like the normal Zope ObjectManager)
    """

    meta_type = "ToManyContRelationship"

    security = ClassSecurityInfo()


    def __init__(self, id):
        """set our instance values"""
        self.id = id
        self._objects = OOBTree()


    def _safeOfObjects(self):
        """
        Try to safely return ZenPack objects rather than
        causing imports to fail.
        """
        objs = []
        for ob in self._objects.values():
            try:
                objs.append(ob.__of__(self))
            except AttributeError:
                log.info("Ignoring unresolvable object '%s'", str(ob))
        return objs

    def __call__(self):
        """when we are called return our related object in our aq context"""
        return self._safeOfObjects()


    def __getattr__(self, name):
        """look in the two object stores for related objects"""
        if '_objects' in self.__dict__:
            objects = self._objects
            if objects.has_key(name): return objects[name]
        raise AttributeError( "Unable to find the attribute '%s'" % name )


    def __hasattr__(self, name):
        """Check to see if we have an object with the given id.

        This will fail if passed a short id while the object is stored
        under its full id (i.e. it is related, not contained); use
        hasobject() to get around this issue.
        """
        return self._objects.has_key(name)


    def hasobject(self, obj):
        "check to see if we have this object"
        return self._objects.get(obj.id) == obj


    def addRelation(self, obj):
        """Override base to run manage_afterAdd like ObjectManager"""
        if self._objects.has_key(obj.getId()):
            log.debug("obj %s already exists on %s", obj.getPrimaryId(),
                        self.getPrimaryId())

        notify(ObjectWillBeAddedEvent(obj, self, obj.getId()))
        ToManyRelationshipBase.addRelation(self, obj)
        obj = obj.__of__(self)
        o = self._getOb(obj.id)
        notify(ObjectAddedEvent(o, self, obj.getId()))


    def _setObject(self,id,object,roles=None,user=None,set_owner=1):
        """ObjectManager interface to add contained object."""
        unused(user, roles, set_owner)
        object.__primary_parent__ = aq_base(self)
        self.addRelation(object)
        return object.getId()


    def manage_afterAdd(self, item, container):
        # Don't do recursion anymore, a subscriber does that.
        pass
    manage_afterAdd.__five_method__ = True

    def manage_afterClone(self, item):
        # Don't do recursion anymore, a subscriber does that.
        pass
    manage_afterClone.__five_method__ = True

    def manage_beforeDelete(self, item, container):
        # Don't do recursion anymore, a subscriber does that.
        pass
    manage_beforeDelete.__five_method__ = True

    def _add(self,obj):
        """add an object to one side of a ToManyContRelationship.
        """
        id = obj.id
        if self._objects.has_key(id):
            raise RelationshipExistsError
        v=checkValidId(self, id)
        if v is not None: id=v
        self._objects[id] = aq_base(obj)
        obj = aq_base(obj).__of__(self)


    def _remove(self, obj=None, suppress_events=False):
        """remove object from our side of a relationship"""
        if obj: objs = [obj]
        else: objs = self.objectValuesAll()
        if not suppress_events:
            for robj in objs:
                notify(ObjectWillBeRemovedEvent(robj, self, robj.getId()))
        if obj:
            id = obj.id
            if not self._objects.has_key(id):
                raise ObjectNotFound(
                    "object %s not found on %s" % (
                    obj.getPrimaryId(), self.getPrimaryId()))
            del self._objects[id]
        else:
            self._objects = OOBTree()
            self.__primary_parent__._p_changed = True
        if not suppress_events:
            for robj in objs:
                notify(ObjectRemovedEvent(robj, self, robj.getId()))


    def _remoteRemove(self, obj=None):
        """remove an object from the far side of this relationship
        if no object is passed in remove all objects"""
        if obj:
            if not self._objects.has_key(obj.id):
                raise ObjectNotFound("object %s not found on %s" % (
                    obj.getPrimaryId(), self.getPrimaryId()))
            objs = [obj]
        else: objs = self.objectValuesAll()
        remoteName = self.remoteName()
        for obj in objs:
            rel = getattr(obj, remoteName)
            try:
                rel._remove(self.__primary_parent__)
            except ObjectNotFound:
                message = log_tb(sys.exc_info())
                log.error('Remote remove failed. Run "zenchkrels -r -x1". ' + message)
                continue


    def _getOb(self, id, default=zenmarker):
        """look up in our local store and wrap in our aq_chain"""
        if self._objects.has_key(id):
            return self._objects[id].__of__(self)
        elif default == zenmarker:
            raise AttributeError( "Unable to find %s" % id )
        return default


    security.declareProtected('View', 'objectIds')
    def objectIds(self, spec=None):
        """only return contained objects"""
        if spec:
            if isinstance(spec,basestring): spec=[spec]
            return [obj.id for obj in self._objects.values() \
                        if obj.meta_type in spec]
        return [ k for k in self._objects.keys() ]
    objectIdsAll = objectIds


    security.declareProtected('View', 'objectValues')
    def objectValues(self, spec=None):
        """override to only return owned objects for many to many rel"""
        if spec:
            if isinstance(spec,basestring): spec=[spec]
            return [ob.__of__(self) for ob in self._objects.values() \
                        if ob.meta_type in spec]
        return self._safeOfObjects()
    security.declareProtected('View', 'objectValuesAll')
    objectValuesAll = objectValues


    def objectValuesGen(self):
        """Generator that returns all related objects."""
        return (obj.__of__(self) for obj in self._objects.values())


    def objectItems(self, spec=None):
        """over ride to only return owned objects for many to many rel"""
        if spec:
            if isinstance(spec,basestring): spec=[spec]
            return [(key,value.__of__(self)) \
                for (key,value) in self._objects.items() \
                    if value.meta_type in spec]
        return [(key,value.__of__(self)) \
                    for (key,value) in self._objects.items()]
    objectItemsAll = objectItems


#FIXME - need to make this work
#    def all_meta_types(self, interfaces=None):
#        mts = []
#        for mt in ToManyRelationshipBase.all_meta_types(self, interfaces):
#            if (mt.has_key('instance') and mt['instance']):
#                for cl in self.sub_classes:
#                    if checkClass(mt['instance'], cl):
#                        mts.append(mt)
#        return mts


    def _getCopy(self, container):
        """
        make new relation add copies of contained objs
        and refs if the relation is a many to many
        """
        rel = self.__class__(self.id)
        rel.__primary_parent__ = container
        rel = rel.__of__(container)
        norelcopy = getattr(self, 'zNoRelationshipCopy', [])
        if self.id in norelcopy: return rel
        for oobj in self.objectValuesAll():
            cobj = oobj._getCopy(rel)
            rel._setObject(cobj.id, cobj)
        return rel

    def checkValidId(self, id):
        """
        Is this a valid id for this container?
        """
        try:
            checkValidId(self, id)
        except:
            raise
        else:
            return True

    def exportXml(self, ofile, ignorerels=[]):
        """Return an xml representation of a ToManyContRelationship
        <tomanycont id='interfaces'>
            <object id='hme0'
                module='Products.Confmon.IpInterface' class='IpInterface'>
                <property></property> etc....
            </object>
        </tomanycont>
        """
        if self.countObjects() == 0: return
        ofile.write("<tomanycont id='%s'>\n" % self.id)
        for obj in self.objectValues():
            obj.exportXml(ofile, ignorerels)
        ofile.write("</tomanycont>\n")


    def checkRelation(self, repair=False):
        """Check to make sure that relationship bidirectionality is ok.
        """
        if len(self._objects):
            log.debug("checking relation: %s", self.id)
        else:
            return

        # look for objects that don't point back to us
        # or who should no longer exist in the database
        remoteName = self.remoteName()
        parentObject = self.getPrimaryParent()
        for obj in self._objects.values():
            if not hasattr(obj, remoteName):
                path = parentObject.getPrimaryUrlPath()
                if repair:
                    log.warn("Deleting %s object '%s' relation '%s' (missing remote relation '%s')",
                             path, obj, self.id, remoteName)
                    self._remove(obj, True)
                    continue
                else:
                    msg = "%s object '%s' relation '%s' missing remote relation '%s'" % (
                             path, obj, self.id, remoteName)
                    raise AttributeError(msg)

            rrel = getattr(obj, remoteName)
            if not rrel.hasobject(parentObject):
                log.error("remote relation %s doesn't point back to %s",
                                rrel.getPrimaryId(), self.getPrimaryId())
                if repair:
                    log.warn("reconnecting relation %s to relation %s",
                            rrel.getPrimaryId(),self.getPrimaryId())
                    rrel._add(parentObject)
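ToManyContRelationship keeps its children acquisition-unwrapped in an OOBTree (_add stores aq_base(obj)) and only wraps them back into its own context on access (_getOb, objectValues). The following is a minimal sketch of that store-unwrapped / wrap-on-access pattern using the real Acquisition and BTrees packages; MiniContainer and Thing are invented stand-ins, not the relationship classes above:

from Acquisition import Implicit, aq_base
from BTrees.OOBTree import OOBTree

class Thing(Implicit):
    def __init__(self, id):
        self.id = id

class MiniContainer(Implicit):
    """Store children unwrapped; wrap them in our acquisition context on access."""
    def __init__(self, id):
        self.id = id
        self._objects = OOBTree()

    def add(self, obj):
        if obj.id in self._objects:
            raise KeyError("%s is already related" % obj.id)
        self._objects[obj.id] = aq_base(obj)      # strip any existing wrapper

    def get(self, id):
        return self._objects[id].__of__(self)     # re-wrap in our context

rel = MiniContainer('interfaces')
rel.add(Thing('hme0'))
print(rel.get('hme0').id)                         # hme0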
Exemple #32
0
class CategoryDateIndex(Persistent):
    def __init__(self):
        self._idxCategItem = OOBTree()

    def dump(self):
        return map(lambda idx: (idx[0], idx[1].dump()),
                   list(self._idxCategItem.items()))

    def unindexConf(self, conf):
        for owner in conf.getOwnerPath():
            if self._idxCategItem.has_key(owner.getId()):
                self._idxCategItem[owner.getId()].unindexConf(conf)
        if self._idxCategItem.has_key('0'):
            self._idxCategItem['0'].unindexConf(conf)

    def reindexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.reindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.unindexConf(conf)
            self.indexConf(conf)


#        from indico.core.db import DBMgr
#        dbi = DBMgr.getInstance()
#        for subcat in categ.getSubCategoryList():
#            self.reindexCateg(subcat)
#        for conf in categ.getConferenceList():
#            while True:
#                try:
#                    dbi.sync()
#                    self.unindexConf(conf)
#                    self.indexConf(conf)
#                    dbi.commit()
#                    break
#                except:
#                    print 'Exception commiting conf %s'%conf.getId()

    def unindexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.unindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.unindexConf(conf)

    def indexCateg(self, categ, dbi=None, counter=0):
        for subcat in categ.getSubCategoryList():
            self.indexCateg(subcat, dbi=dbi, counter=counter + 1)
            if dbi and counter < 2:
                dbi.commit()
        for conf in categ.getConferenceList():
            self.indexConf(conf)

    def _indexConf(self, categid, conf):
        # only the more restrictive setup is taken into account
        if categid in self._idxCategItem:
            res = self._idxCategItem[categid]
        else:
            res = CalendarIndex()
        res.indexConf(conf)
        self._idxCategItem[categid] = res

    # TOREMOVE?? defined in CategoryDayIndex
    def indexConf(self, conf):
        for categ in conf.getOwnerPath():
            self._indexConf(categ.getId(), conf)
        self._indexConf("0", conf)

    def getObjectsIn(self, categid, sDate, eDate):
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].getObjectsIn(sDate, eDate)
        else:
            return []

    def getObjectsStartingIn(self, categid, sDate, eDate):
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].getObjectsStartingIn(
                sDate, eDate)
        else:
            return []

    def getObjectsInDay(self, categid, sDate):
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].getObjectsInDay(sDate)
        else:
            return []

    def hasObjectsAfter(self, categid, sDate):
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].hasObjectsAfter(sDate)
        else:
            return False

    def getObjectsEndingAfter(self, categid, sDate):
        categid = str(categid)
        if categid in self._idxCategItem:
            return self._idxCategItem[categid].getObjectsEndingAfter(sDate)
        else:
            return []
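CategoryDateIndex.indexConf registers each conference under every category on its owner path plus a catch-all '0' bucket, delegating per-category storage to a CalendarIndex. A rough standalone sketch of that fan-out, with a plain set standing in for CalendarIndex and invented names throughout:

from BTrees.OOBTree import OOBTree

class MiniCategoryIndex(object):
    """Sketch: register a conference id under every category on its owner path."""

    def __init__(self):
        self._idx = OOBTree()   # categid -> set of conference ids

    def index_conf(self, conf_id, owner_path_ids):
        for categid in list(owner_path_ids) + ['0']:   # '0' is the catch-all root bucket
            bucket = self._idx.get(categid, set())
            bucket.add(conf_id)
            self._idx[categid] = bucket   # reassign so the BTree records the change

    def items_in(self, categid):
        return sorted(self._idx.get(str(categid), set()))

idx = MiniCategoryIndex()
idx.index_conf('conf1', ['12', '3'])
print(idx.items_in('3'))    # ['conf1']
print(idx.items_in('0'))    # ['conf1']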
Exemple #33
0
class Folder(Persistent, Contained):
    """The standard Zope Folder implementation."""

    implements(IContentContainer)

    def __init__(self):
        self.data = OOBTree()

    def keys(self):
        """Return a sequence-like object containing the names
           associated with the objects that appear in the folder
        """
        return self.data.keys()

    def __iter__(self):
        return iter(self.data.keys())

    def values(self):
        """Return a sequence-like object containing the objects that
           appear in the folder.
        """
        return self.data.values()

    def items(self):
        """Return a sequence-like object containing tuples of the form
           (name, object) for the objects that appear in the folder.
        """
        return self.data.items()

    def __getitem__(self, name):
        """Return the named object, or raise ``KeyError`` if the object
           is not found.
        """
        return self.data[name]

    def get(self, name, default=None):
        """Return the named object, or the value of the `default`
           argument if the object is not found.
        """
        return self.data.get(name, default)

    def __contains__(self, name):
        """Return true if the named object appears in the folder."""
        return self.data.has_key(name)

    def __len__(self):
        """Return the number of objects in the folder."""
        return len(self.data)

    def __setitem__(self, name, object):
        """Add the given object to the folder under the given name."""

        if not (isinstance(name, str) or isinstance(name, unicode)):
            raise TypeError("Name must be a string rather than a %s" %
                            name.__class__.__name__)
        try:
            unicode(name)
        except UnicodeError:
            raise TypeError("Non-unicode names must be 7-bit-ascii only")
        if not name:
            raise TypeError("Name must not be empty")

        if name in self.data:
            raise KeyError("name, %s, is already in use" % name)

        setitem(self, self.data.__setitem__, name, object)

    def __delitem__(self, name):
        """Delete the named object from the folder. Raises a KeyError
           if the object is not found."""
        uncontained(self.data[name], self, name)
        del self.data[name]
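Folder is essentially a thin mapping facade over an OOBTree; the only real logic is the name validation in __setitem__ before the containment setitem helper fires the container events. A dependency-free sketch of just those checks (check_name is an invented helper, and the 7-bit-ascii test is approximated with encode()):

from BTrees.OOBTree import OOBTree

def check_name(data, name):
    """Sketch of the checks Folder.__setitem__ performs before storing."""
    if not isinstance(name, str):
        raise TypeError("Name must be a string rather than a %s"
                        % name.__class__.__name__)
    try:
        name.encode('ascii')            # approximates the 7-bit-ascii test
    except UnicodeError:
        raise TypeError("Non-unicode names must be 7-bit-ascii only")
    if not name:
        raise TypeError("Name must not be empty")
    if name in data:
        raise KeyError("name, %s, is already in use" % name)

data = OOBTree()
check_name(data, 'docs')
data['docs'] = object()
print(sorted(data.keys()))              # ['docs']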
class NoDuplicateLogin(BasePlugin, Cacheable):
    """PAS plugin that rejects multiple logins with the same user at
    the same time, by forcing a logout of all but one user.  If a user has max_seats > 1, then it will reject users after maximum seats are filled.
    """

    meta_type = 'No Duplicate Login Plugin'
    cookie_name = '__noduplicate'
    DEBUG = False
    security = ClassSecurityInfo()
    login_member_data_mapping = None

    _properties = (
        {
            'id': 'title',
            'label': 'Title',
            'type': 'string',
            'mode': 'w'
        },
        {
            'id': 'cookie_name',
            'label': 'Cookie Name',
            'type': 'string',
            'mode': 'w'
        },
    )

    # UIDs older than 30 minutes are deleted from our storage; this can also be set per member data property (which defaults to 5 minutes)...
    if DEBUG:
        default_minutes_to_persist = 5
    else:
        default_minutes_to_persist = 30

    time_to_persist_cookies = datetime.timedelta(
        minutes=default_minutes_to_persist)

    # XXX I wish I had a better explanation for this, but disabling this makes
    # both the ZMI (basic auth) work and the NoDuplicateLogin work.
    # Otherwise, we get a traceback on basic auth. I suspect that means this
    # plugin needs to handle basic auth better but I'm not sure how or why.
    # Normally, we would prefer to see our exceptions.
    _dont_swallow_my_exceptions = False

    def __init__(self, id, title=None, cookie_name=''):
        self._id = self.id = id
        self.title = title

        if cookie_name:
            self.cookie_name = cookie_name

        self.mapping1 = OOBTree()  # userid : { tokens: [UID, UID, UID] }
        self.mapping2 = OOBTree()  # UID : { userid: string, ip: string, startTime: DateTime, expireTime: DateTime }
        self.login_member_data_mapping = OOBTree()  # userid : { maxSeats: integer, seatTimeoutInMinutes: float, expireTime: DateTime }

        self.plone_session = None  # for plone.session

    security.declarePrivate('authenticateCredentials')

    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
                credentials.get('source') != 'plone.session'):
            return None
        else:
            session_source = self.session

            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret,
                    ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue

                    ticket_data = tktauth.validateTicket(
                        secret,
                        ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)

                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)

            if info is None:
                return None

            login = info['login']

        cookie_val = self.getCookie()

        # get max seats from member data property or cache and default to 1 if not set
        max_seats = 1
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str(max_seats)

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)

                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join(
                            existing['tokens'])

                if existing and cookie_val not in existing['tokens']:
                    # The cookies values differ, we want to logout the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will cleanup our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(
                            _(u"Someone else logged in under your name.  You have been \
                            logged out"), "error")
                    except:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')

            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)

                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass

                from_ip = None
                try:
                    from_ip = self.get_ip(request)
                except:
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = {'tokens': []}
                self.mapping1[login]['tokens'].append(cookie_val)
                self.mapping2[cookie_val] = {
                    'userid': login,
                    'ip': from_ip,
                    'startTime': now,
                    'expireTime': DateTime(now.asdatetime() +
                                           self.time_to_persist_cookies),
                }
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it, or activate it if it is not added yet
                self.verifyToken(cookie_val, login, max_seats, request,
                                 response)
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."

                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if max_seats are filled, then force logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)

        return None  # Note that we never return anything useful

    security.declarePrivate('getSeatsPropertiesForLogin')

    def getSeatsPropertiesForLogin(self, login):

        # initialize max_seats at 1
        max_seats = 1
        seat_timeout = 5  # initialize to 5 minutes

        if self.login_member_data_mapping is None:
            self.login_member_data_mapping = OOBTree()  # if this has not been initialized then do it now
            if self.DEBUG:
                print "Initialized the Login Member Data Mapping"

        # if the max_seats has a valid cached value, then use it
        cached_member_data = self.login_member_data_mapping.get(login, None)

        now = DateTime()
        if (cached_member_data and 'expireTime' in cached_member_data
                and 'maxSeats' in cached_member_data
                and 'seatTimeoutInMinutes' in cached_member_data
                and now < cached_member_data['expireTime']):
            max_seats = cached_member_data['maxSeats']
            seat_timeout = cached_member_data['seatTimeoutInMinutes']
        else:
            member = self.getMember(login)
            # get the max_seats property from the member data tool
            if member is not None:
                max_seats = member.getProperty("max_seats")
                seat_timeout = member.getProperty("seat_timeout_in_minutes")
                # cache the max_seats for login
                td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
                self.login_member_data_mapping[login] = {
                    'maxSeats': int(max_seats),
                    'seatTimeoutInMinutes': float(seat_timeout),
                    'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
                }

        return {
            'maxSeats': int(max_seats),
            'seatTimeoutInMinutes': float(seat_timeout)
        }

    def getMember(self, email):
        """ Returns a member object for the given username """

        member = None

        try:
            member = api.user.get(username=email)
        except:
            if self.DEBUG:
                traceback.print_exc()

        return member

    security.declarePrivate('getMaxSeatsForLogin')

    def getMaxSeatsForLogin(self, login):
        """Returns the max_seats property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        max_seats = 1  # default to 1 seat

        if seats_properties and 'maxSeats' in seats_properties:
            max_seats = seats_properties['maxSeats']
        return max_seats

    security.declarePrivate('getSeatTimeoutInMinutesForLogin')

    def getSeatTimeoutInMinutesForLogin(self, login):
        """Returns the seat_timeout_in_minutes property for a given login
        """
        seats_properties = self.getSeatsPropertiesForLogin(login)
        seat_timeout_in_minutes = 5  # default to 5 minutes

        if seats_properties and 'seatTimeoutInMinutes' in seats_properties:
            seat_timeout_in_minutes = seats_properties['seatTimeoutInMinutes']
        return seat_timeout_in_minutes

    security.declarePrivate('resetCredentials')

    def resetCredentials(self, request, response):
        """See ICredentialsResetPlugin.
        """
        alsoProvides(request, IDisableCSRFProtection)

        if self.DEBUG:
            print "resetCredentials()::"

        try:
            cookie_val = self.getCookie()
            if cookie_val:
                loginandinfo = self.mapping2.get(cookie_val, None)
                if loginandinfo:
                    login = loginandinfo['userid']
                    del self.mapping2[cookie_val]
                    existing = self.mapping1.get(login, None)
                    if (existing and 'tokens' in existing
                            and cookie_val in existing['tokens']):
                        existing['tokens'].remove(cookie_val)
                        assert cookie_val not in existing['tokens']

            self.setCookie('')
        except:
            if self.DEBUG:
                traceback.print_exc()

    security.declarePrivate('resetAllCredentials')

    def resetAllCredentials(self, request, response):
        """Call resetCredentials of all plugins.

        o This is not part of any contract.
        """
        # This is arguably a bit hacky, but calling
        # pas_instance.resetCredentials() will not do anything because
        # the user is still anonymous.  (I think it should do
        # something nevertheless.)

        alsoProvides(request, IDisableCSRFProtection)
        pas_instance = self._getPAS()
        plugins = pas_instance._getOb('plugins')
        cred_resetters = plugins.listPlugins(ICredentialsResetPlugin)
        for resetter_id, resetter in cred_resetters:
            resetter.resetCredentials(request, response)

    security.declarePrivate('getCookie')

    def getCookie(self):
        """Helper to retrieve the cookie value from either cookie or
        session, depending on policy.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        cookie = request.get(self.cookie_name, '')

        if self.DEBUG:
            print "getCookie():: " + str(unquote(cookie))

        return unquote(cookie)

    security.declarePrivate('setCookie')

    def setCookie(self, value):
        """Helper to set the cookie value to either cookie or
        session, depending on policy.

        o Setting to '' means delete.
        """
        value = quote(value)
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']

        if value:
            response.setCookie(self.cookie_name, value, path='/')
        else:
            response.expireCookie(self.cookie_name, path='/')

        if self.DEBUG:
            print "setCookie():: " + str(value)

    security.declarePrivate('clearSeatsPropertiesForLogin')

    def clearSeatsPropertiesForLogin(self, login):
        """ Clears the cached seats properties for the given user. """
        isCached = self.login_member_data_mapping and self.login_member_data_mapping.get(
            login, None) is not None

        if isCached:
            del self.login_member_data_mapping[login]

    security.declarePrivate('clearStaleTokens')

    def clearStaleTokens(self, login):
        """Clear tokens that should be expired or that have no corresponding mapping and thus have been orphaned."""
        if self.DEBUG:
            print "clearStaleTokens:: " + login

        existing = self.mapping1.get(login, None)

        if existing and 'tokens' in existing:
            # for each token, remove if stale
            for token in list(existing['tokens']):  # iterate over a copy; stale tokens are removed below
                tokenInfo = self.mapping2.get(token, None)

                now = DateTime()

                # if the token info does not exist, then remove it from the active tokens
                if tokenInfo is None:
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because it was orphaned." % (
                            token)
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)

                # if the expireTime for the token has passed, then expire the token
                if (tokenInfo and 'expireTime' in tokenInfo
                        and tokenInfo['expireTime'] < now):
                    if self.DEBUG:
                        print "clearStaleTokens:: Remove token (%s) because expireTime(%s). startTime(%s)" % (
                            token, tokenInfo['expireTime'],
                            tokenInfo['startTime'])
                    # remove from the active tokens for the given login
                    self.mapping1[login]['tokens'].remove(token)
                    del self.mapping2[token]

    security.declarePrivate('clearAllTokensForUser')

    def clearAllTokensForUser(self, login):
        """Clear all tokens for a specific user."""
        if self.DEBUG:
            print "clearAllTokensForUser:: " + login

        existing = self.mapping1.get(login, None)

        if existing and 'tokens' in existing:
            # for each token, remove if stale
            for token in list(existing['tokens']):  # iterate over a copy; every token is removed below
                tokenInfo = self.mapping2.get(token, None)

                now = DateTime()

                # remove it from the active tokens
                if self.DEBUG:
                    print "clearAllTokensForUser:: Remove token (%s) because it was orphaned." % (
                        token)
                # remove from the active tokens for the given login
                self.mapping1[login]['tokens'].remove(token)

                # if there is also a corresponding mapping for tokenInfo, then delete the mapping
                if tokenInfo:
                    del self.mapping2[token]

    security.declarePrivate('issueToken')

    def issueToken(self, login, max_seats, request, response):
        """ Creates a uid and stores in a cookie browser-side
        """
        # When no cookie is present, we generate one, store it and
        # set it in the response:

        alsoProvides(request, IDisableCSRFProtection)

        cookie_val = uuid()

        if self.DEBUG:
            print "issueToken::" + cookie_val

        self.setCookie(cookie_val)

    security.declarePrivate('forceLogoutForUser')

    def forceLogoutForUser(self, login, request, response):
        """ Forces logout. """
        # Logout the
        # user by calling resetCredentials.  Note that this
        # will eventually call our own resetCredentials which
        # will cleanup our own cookie.

        alsoProvides(request, IDisableCSRFProtection)
        try:
            self.resetAllCredentials(request, response)
            self._getPAS().plone_utils.addPortalMessage(
                _(u"The maximum number of simultaneous logins for this user has been exceeded.  You have been \
                logged out."), "error")
        except:
            traceback.print_exc()

    security.declarePrivate('isLoginAtCapacity')

    def isLoginAtCapacity(self, login, max_seats):
        """ Returns whether or not the login has filled all available seats. """

        # clear stale tokens to make sure we use the correct token count
        self.clearStaleTokens(login)

        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len(existing['tokens'])

        # return whether max_seats have been filled
        return iTokens >= max_seats

    security.declarePrivate('verifyToken')

    def verifyToken(self, token, login, max_seats, request, response):
        """ Activates a token by putting it in the tokens[] array of mapping1[login] if it is not already present. """

        alsoProvides(request, IDisableCSRFProtection)

        isVerified = False  # it is verified if it is already in the active tokens list server-side
        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len(existing['tokens'])

            isVerified = token in existing['tokens']

            if self.DEBUG:
                print "authenticateCredentials():: cookie_val is " + token + ", and active tokens are: " + ', '.join(
                    existing['tokens'])
        else:
            self.mapping1[login] = {
                'tokens': []
            }  # initialize tokens array for this login

        if self.DEBUG:
            print "verifyToken:: login = %s, active = %i, max = %i" % (
                login, iTokens, max_seats)

        # default to the 5 minute timeout set above; overridden below if the member property is readable
        td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        try:
            # for seats > 1, use member property for cookie timeout value
            seat_timeout = self.getSeatTimeoutInMinutesForLogin(login)
            td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        except:
            pass

        # if this is the last token to issue,
        # then go ahead and clear stale tokens for this login
        if not isVerified and iTokens >= max_seats - 1:
            self.clearStaleTokens(login)

        from_ip = None
        try:
            from_ip = self.get_ip(request)
        except:
            traceback.print_exc()

        if isVerified:
            # just extend it
            now = DateTime()
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }

            if self.DEBUG:
                print "verifyToken:: logon= %s, IP= %s, startTime= %s, expireTime= %s" % (
                    self.mapping2.get(token)['userid'], from_ip,
                    self.mapping2.get(token)['startTime'],
                    self.mapping2.get(token)['expireTime'])
        elif iTokens < max_seats:

            now = DateTime()
            # if it already exists, add it
            self.mapping1[login]['tokens'].append(token)
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }

            if self.DEBUG:
                print "verifyToken:: after activate token, active tokens = " + ', '.join(
                    self.mapping1[login]['tokens'])

            # since this was activated, just ensure that the cookie in the browser reflects what is server side
            self.setCookie(token)
        else:
            # cannot issue cookie, so clear in browser-side
            #self.setCookie('')

            # if the token is not able to be issued because of max_seats filled,
            # then force logout, and show the message

            # Logout the
            # user by calling resetCredentials.  Note that this
            # will eventually call our own resetCredentials which
            # will cleanup our own cookie.
            try:
                self.resetAllCredentials(request, response)
                self._getPAS().plone_utils.addPortalMessage(
                    _(u"The maximum number of simultaneous logins for this user has been exceeded.  You have been \
                    logged out."), "error")
            except:
                traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'clearAllTokens')

    def clearAllTokens(self):
        """Clear all server side tokens.  Use only in testing."""
        if self.DEBUG:
            print "clearAllTokens():: called"

        try:
            self.mapping1.clear()
            self.mapping2.clear()
            self.setCookie('')
        except:
            traceback.print_exc()

    security.declareProtected(Permissions.manage_users, 'cleanUp')

    def cleanUp(self):
        """Clean up storage.

        Call this periodically through the web to clean up old entries
        in the storage."""
        now = DateTime()

        def cleanStorage(mapping):
            count = 0
            for key, obj in list(mapping.items()):  # copy: entries are deleted while we iterate
                # if this is not a dictionary, then it is a stale entry (could be tuple from old scheme)
                if not isinstance(obj, dict):
                    del mapping[key]
                    count += 1
                elif 'expireTime' in obj and obj['expireTime'] < now:
                    del mapping[key]

                    # if the mapping2 deletes its token by UID, make sure that the mapping1 removes that token as well
                    for userid, info in self.mapping1.items():
                        try:
                            # remove the UID from the tokens for that login
                            info['tokens'].remove(key)
                        except:
                            pass
                    count += 1
            return count

        count = 0
        for mapping in (self.mapping2, self.login_member_data_mapping):
            if mapping is None:
                continue
            count += cleanStorage(mapping)

        return "%s entries deleted." % count

    security.declarePrivate('get_ip')

    def get_ip(self, request):
        """ Extract the client IP address from the HTTP request in a proxy-compatible way.
        @return: IP address as a string or None if not available"""

        if "HTTP_X_FORWARDED_FOR" in request.environ:
            # Virtual host
            ip = request.environ["HTTP_X_FORWARDED_FOR"]
        elif "HTTP_HOST" in request.environ:
            # Non-virtualhost
            ip = request.environ["REMOTE_ADDR"]
        else:
            # Should not reach here
            ip = '0.0.0.0'

        if self.DEBUG:
            print "get_ip:: " + ip
        return ip
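NoDuplicateLogin's seat accounting lives in two parallel BTrees: mapping1 (login -> list of active seat tokens) and mapping2 (token -> per-seat info with an expiry). Here is a compact, framework-free sketch of that bookkeeping, using datetime instead of Zope's DateTime; issue_seat, seats_by_login and info_by_token are invented names, not the plugin's API:

import datetime
import uuid

from BTrees.OOBTree import OOBTree

# Two parallel trees, mirroring the plugin's mapping1/mapping2:
#   seats_by_login: login -> {'tokens': [token, ...]}
#   info_by_token:  token -> {'userid': ..., 'expireTime': ...}
seats_by_login = OOBTree()
info_by_token = OOBTree()

def issue_seat(login, max_seats, timeout_minutes=30):
    """Grant a seat token if the login has capacity, otherwise return None."""
    record = seats_by_login.setdefault(login, {'tokens': []})
    now = datetime.datetime.utcnow()
    # drop expired or orphaned tokens first so they do not count against the quota
    for token in list(record['tokens']):
        info = info_by_token.get(token)
        if info is None or info['expireTime'] < now:
            record['tokens'].remove(token)
            if token in info_by_token:
                del info_by_token[token]
    if len(record['tokens']) >= max_seats:
        return None
    token = uuid.uuid4().hex
    record['tokens'].append(token)
    # in real persistent code the record would be reassigned (or use a
    # PersistentMapping) so the ZODB notices the mutation; this sketch is in-memory
    info_by_token[token] = {
        'userid': login,
        'expireTime': now + datetime.timedelta(minutes=timeout_minutes),
    }
    return token

print(issue_seat('alice', max_seats=2) is not None)   # True
print(issue_seat('alice', max_seats=2) is not None)   # True
print(issue_seat('alice', max_seats=2))               # None: both seats are taken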
Exemple #35
0
class CSGooglePlusUsers(BasePlugin):
    """PAS plugin for authentication against Google+.
Here, we implement a number of PAS interfaces, using a session managed
by Beaker (via collective.beaker) to temporarily store the values we
have captured.
"""

    # List PAS interfaces we implement here
    implements(ICSGooglePlusPlugin, IExtractionPlugin, ICredentialsResetPlugin,
               IAuthenticationPlugin, IPropertiesPlugin,
               IUserEnumerationPlugin, IUserFactoryPlugin)

    def __init__(self, id, title=None):
        self.__name__ = self.id = id
        self.title = title
        self._storage = OOBTree()

    #
    # IExtractionPlugin
    #
    def extractCredentials(self, request):
        """This method is called on every request to extract credentials.
In our case, that means looking for the values we store in the
session.
o Return a mapping of any derived credentials.
o Return an empty mapping to indicate that the plugin found no
appropriate credentials.
"""

        # Get the session from Beaker.

        session = ISession(request, None)

        if session is None:
            return None

        # We have been authenticated and we have a session that has not yet
        # expired:

        if SessionKeys.userId in session:

            return {
                'src': self.getId(),
                'userid': session[SessionKeys.userId],
                'username': session[SessionKeys.userId],
            }

        return None

    #
    # IAuthenticationPlugin
    #

    def authenticateCredentials(self, credentials):
        """This method is called with the credentials that have been
extracted by extractCredentials(), to determine whether the user is
authenticated or not.
We basically trust our session data, so if the session contains a
user, we treat them as authenticated. Other systems may have more
stringent needs, but we should avoid an expensive check here, as this
method may be called very often - at least once per request.
credentials -> (userid, login)
o 'credentials' will be a mapping, as returned by extractCredentials().
o Return a tuple consisting of user ID (which may be different
from the login name) and login
o If the credentials cannot be authenticated, return None.
"""

        # If we didn't extract, ignore
        if credentials.get('src', None) != self.getId():
            return

        # We have a session, which was identified by extractCredentials above.
        # Trust that it extracted the correct user id and login name

        if ('userid' in credentials and 'username' in credentials):
            return (
                credentials['userid'],
                credentials['username'],
            )

        return None

    #
    # ICredentialsResetPlugin
    #

    def resetCredentials(self, request, response):
        """This method is called if the user logs out.
Here, we simply destroy their session.
"""
        session = ISession(request, None)
        if session is None:
            return

        session.delete()

    #
    # IPropertiesPlugin
    #

    def getPropertiesForUser(self, user, request=None):
        """This method is called whenever Plone needs to get properties for
a user. We return a dictionary with properties that map to those
Plone expect.
user -> {}
o User will implement IPropertiedUser.
o Plugin should return a dictionary or an object providing
IPropertySheet.
o Plugin may scribble on the user, if needed (but must still
return a mapping, even if empty).
o May assign properties based on values in the REQUEST object, if
present
"""
        # If this is a GooglePlus User, it implements IGooglePlusUser
        if not IGooglePlusUser.providedBy(user):
            return {}

        else:
            user_data = self._storage.get(user.getId(), None)
            if user_data is None:
                return {}

            return user_data

    #
    # IUserEnumerationPlugin
    #

    def enumerateUsers(self,
                       id=None,
                       login=None,
                       exact_match=False,
                       sort_by=None,
                       max_results=None,
                       **kw):
        """This function is used to search for users.
We don't implement a search of all of GooglePlus (!), but it's important
that we allow Plone to search for the currently logged in user and get
a result back, so we effectively implement a search against only one
value.
-> ( user_info_1, ... user_info_N )
o Return mappings for users matching the given criteria.
o 'id' or 'login', in combination with 'exact_match' true, will
return at most one mapping per supplied ID ('id' and 'login'
may be sequences).
o If 'exact_match' is False, then 'id' and / or login may be
treated by the plugin as "contains" searches (more complicated
searches may be supported by some plugins using other keyword
arguments).
o If 'sort_by' is passed, the results will be sorted accordingly.
known valid values are 'id' and 'login' (some plugins may support
others).
o If 'max_results' is specified, it must be a positive integer,
limiting the number of returned mappings. If unspecified, the
plugin should return mappings for all users satisfying the criteria.
o Minimal keys in the returned mappings:
'id' -- (required) the user ID, which may be different than
the login name
'login' -- (required) the login name
'pluginid' -- (required) the plugin ID (as returned by getId())
'editurl' -- (optional) the URL to a page for updating the
mapping's user
o Plugin *must* ignore unknown criteria.
o Plugin may raise ValueError for invalid criteria.
o Insufficiently-specified criteria may have catastrophic
scaling issues for some implementations.
"""

        if exact_match:
            if id is not None:
                user_data = self._storage.get(id, None)
                if user_data is not None:
                    return ({
                        'id': id,
                        'login': id,
                        'pluginid': self.getId(),
                    }, )
            return ()

        else:
            # XXX: return all users, without any matching
            data = []
            for id, user_data in self._storage.items():
                data.append({
                    'id': id,
                    'login': id,
                    'pluginid': self.getId(),
                })
            return data

    # IUserFactoryPlugin interface
    def createUser(self, user_id, name):
        # Create a GooglePlusUser just if this is a GooglePlus User id
        user_data = self._storage.get(user_id, None)
        if user_data is not None:
            return GooglePlusUser(user_id, name)

        return None
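The plugin's credential flow is: extractCredentials reads the Beaker session and tags the result with its own plugin id, and authenticateCredentials then trusts anything carrying that tag. A framework-free sketch of that handshake (PLUGIN_ID and the function names are invented; a plain dict stands in for the Beaker session):

PLUGIN_ID = 'csgoogleplus'   # stands in for self.getId()

def extract_credentials(session):
    """Read the stored user id from the session, tagging the result as ours."""
    if 'userId' in session:
        return {'src': PLUGIN_ID,
                'userid': session['userId'],
                'username': session['userId']}
    return {}

def authenticate_credentials(credentials):
    """Trust any credentials that our own extraction produced."""
    if credentials.get('src') != PLUGIN_ID:
        return None                      # not ours: let other plugins handle it
    if 'userid' in credentials and 'username' in credentials:
        return (credentials['userid'], credentials['username'])
    return None

session = {'userId': 'google-123'}
print(authenticate_credentials(extract_credentials(session)))   # ('google-123', 'google-123')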
Exemple #36
0
class CategoryIndex(Persistent):
    def __init__(self):
        self._idxCategItem = OOBTree()

    def dump(self):
        return list(self._idxCategItem.items())

    def _indexConfById(self, categid, confid):
        # only the more restrictive setup is taken into account
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            res = self._idxCategItem[categid]
        else:
            res = []
        res.append(confid)
        self._idxCategItem[categid] = res

    def unindexConf(self, conf):
        confid = str(conf.getId())
        self.unindexConfById(confid)

    def unindexConfById(self, confid):
        for categid in self._idxCategItem.keys():
            if confid in self._idxCategItem[categid]:
                res = self._idxCategItem[categid]
                res.remove(confid)
                self._idxCategItem[categid] = res

    def reindexCateg(self, categ):
        for subcat in categ.getSubCategoryList():
            self.reindexCateg(subcat)
        for conf in categ.getConferenceList():
            self.reindexConf(conf)

    def reindexConf(self, conf):
        self.unindexConf(conf)
        self.indexConf(conf)

    def indexConf(self, conf):
        categs = conf.getOwnerPath()
        level = 0
        for categ in conf.getOwnerPath():
            if conf.getFullVisibility() > level:
                self._indexConfById(categ.getId(), conf.getId())
            level += 1
        if conf.getFullVisibility() > level:
            self._indexConfById("0", conf.getId())

    def getItems(self, categid):
        categid = str(categid)
        if self._idxCategItem.has_key(categid):
            return self._idxCategItem[categid]
        else:
            return []

    def _check(self, dbi=None):
        """
        Performs some sanity checks
        """
        i = 0
        from MaKaC.conference import ConferenceHolder
        confIdx = ConferenceHolder()._getIdx()

        for cid, confs in self._idxCategItem.iteritems():
            for confId in confs:
                # it has to be in the conference holder
                if confId not in confIdx:
                    yield "[%s] '%s' not in ConferenceHolder" % (cid, confId)
                # the category has to be one of the owners
                elif cid not in (map(
                        lambda x: x.id,
                        ConferenceHolder().getById(confId).getOwnerPath()) +
                                 ['0']):
                    yield "[%s] Conference '%s' is not owned" % (cid, confId)
            if dbi and i % 100 == 99:
                dbi.sync()
            i += 1
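CategoryIndex.indexConf applies the conference's visibility setting while walking up the owner path: an ancestor (and finally the root bucket '0') only gets the conference while the visibility level is still greater than that ancestor's depth. A small sketch of just that rule, with invented names and an assumed nearest-first ordering of the owner path:

def categories_to_index(owner_path_ids, visibility):
    """Sketch of CategoryIndex.indexConf's rule: a conference is indexed under
    an ancestor category (and finally the '0' root bucket) only while its
    visibility level is still greater than that ancestor's depth."""
    selected = []
    level = 0
    for categid in owner_path_ids:
        if visibility > level:
            selected.append(categid)
        level += 1
    if visibility > level:
        selected.append('0')
    return selected

print(categories_to_index(['42', '7', '1'], visibility=2))   # ['42', '7']
print(categories_to_index(['42', '7', '1'], visibility=99))  # ['42', '7', '1', '0']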
class ListLookup(SimpleItem):
    """ An implementation of IListLookup which uses to To address in a message,
        to lookup which list to send a message to.

    Some framework setup:
        >>> import Products.Five
        >>> from Products.Five import zcml
        >>> zcml.load_config('meta.zcml', Products.Five)
        >>> zcml.load_config('permissions.zcml', Products.Five)
        >>> zcml.load_config("configure.zcml", Products.Five.site)
        >>> from Products.listen.utilities import tests
        >>> zcml.load_config('configure.zcml', tests)

    Now let's make a fake mailing list in our site
        >>> ml = tests.install_fake_ml(self.folder, suppress_events=True)
        >>> from zope.app.component.hooks import setSite
        >>> setSite(ml)

    Create our utility:
        >>> from Products.listen.utilities.list_lookup import ListLookup, ListDoesNotExist
        >>> ll = ListLookup('list_lookup').__of__(self.folder)

    Register the list:
        >>> ll.registerList(ml)
        >>> ll.getListForAddress(ml.mailto) == ml
        True

    Attempt to register it under another address:
        >>> from zope.app.exception.interfaces import UserError
        >>> ml.mailto = '*****@*****.**'
        >>> try:
        ...     ll.registerList(ml)
        ... except UserError:
        ...     print "Raised expected error"
        ...
        Raised expected error
        >>> ll.getListForAddress(ml.mailto) == ml
        False

    Update the list address to the new address:
        >>> ll.updateList(ml)
        >>> ll.getListForAddress(ml.mailto) == ml
        True

    Add another list with the same address:
        >>> from Products.listen.utilities.tests import FakeMailingList
        >>> ml2 = FakeMailingList('ml2')
        >>> ml_id = self.folder._setObject('ml2', ml2)
        >>> ml2 = getattr(self.folder, ml_id)
        >>> ml2.mailto = ml.mailto
        >>> try:
        ...     ll.registerList(ml2)
        ... except UserError:
        ...     print "Raised expected error"
        ...
        Raised expected error

    Try to update an unregistered list:
        >>> try:
        ...     ll.updateList(ml2)
        ... except UserError:
        ...     print "Raised expected error"
        ...
        Raised expected error

    Let's try unregistering:
        >>> ll.unregisterList(ml)
        >>> ll.getListForAddress(ml.mailto)

    Unregistering a list that isn't registered shouldn't cause any problems:
        >>> ll.unregisterList(ml2)

    Let's send a mail:
        >>> ll.registerList(ml)
        >>> ll.deliverMessage({'Mail':'x-original-to: [email protected]\\r\\nTo: [email protected]\\r\\nFrom: [email protected]\\r\\nSubject: Bogus\\r\\n\\r\\nTest'})
        'Success [email protected]'

    And with an SMTP that doesn't set x-original-to:
        >>> ll.deliverMessage({'Mail':'To: [email protected]\\r\\nFrom: [email protected]\\r\\nSubject: Bogus\\r\\n\\r\\nTest'})
        'Success [email protected]'

    And another to a bad address:
        >>> from zExceptions import NotFound
        >>> try:
        ...     ll.deliverMessage({'Mail':'x-original-to: [email protected]\\r\\nTo: [email protected]\\r\\nFrom: [email protected]\\r\\nSubject: Bogus\\r\\n\\r\\nTest'})
        ... except ListDoesNotExist:
        ...     print "Raised expected error"
        ...
        Raised expected error
    """

    implements(IListLookup)

    def __init__(self, id='listen_list_lookup'):
        self.id = id
        self._mapping = OOBTree()
        self._reverse = OOBTree()
        self.__name__ = 'listen_lookup'

    # We need to provide a __parent__ property to be registerable
    def _getParent(self):
        return aq_parent(self)

    #__parent__ = property(_getParent)

    def registerList(self, ml):
        """See IListLookup interface documentation"""
        address = ml.mailto
        # normalize case
        if not address:
            # Our list does not have an address yet, this only happens when
            # the add form wasn't used.
            return
        address = address.lower()
        path = '/'.join(ml.getPhysicalPath())
        current_addr = self._reverse.get(path, None)
        current_path = self._mapping.get(address, None)
        if current_addr is not None:
            raise UserError, _("This list is already registered, use "\
                             "updateList to change the address.")
        if current_path is not None:
            raise UserError, _("A list is already registered for this address,"\
                             " you must unregister it first.")
        self._mapping[address] = path
        self._reverse[path] = address

    def updateList(self, ml):
        """See IListLookup interface documentation"""
        address = ml.mailto or ''
        # normalize case
        address = address.lower()
        path = '/'.join(ml.getPhysicalPath())
        current_addr = self._reverse.get(path, None)
        current_path = self._mapping.get(address, None)
        if (current_path is None and current_addr is not None
                and current_addr != address):
            # The mailing list address has changed to one which is unknown
            del self._mapping[current_addr]
            self._reverse[path] = address
            self._mapping[address] = path
        elif current_addr == address and current_path == path:
            # Nothing has changed, do nothing
            pass
        elif current_addr is None and current_path is None:
            # The list is not registered at all, this happens when the addform
            # was not used, stupid CMF
            self.registerList(ml)
        else:
            # The new address is already registered
            raise UserError, _("A list is already registered for this address")

    def unregisterList(self, ml):
        """See IListLookup interface documentation"""
        address = ml.mailto
        path = '/'.join(ml.getPhysicalPath())
        if not address:
            # We are deleting a list without an address
            if path in self._reverse:
                del self._reverse[path]
            return
        # normalize case before looking up, since addresses are stored lowered
        address = address.lower()
        current_ml = self._mapping.get(address, None)
        if current_ml == path:
            del self._mapping[address]
            del self._reverse[current_ml]

    def getListForAddress(self, address):
        """See IListLookup interface documentation"""
        list_path = self._mapping.get(address, None)
        if list_path is not None:
            site = getSite()
            ml = site.unrestrictedTraverse(list_path)
            return aq_inner(ml)
        return None

    def deliverMessage(self, request):
        """See IListLookup interface documentation"""
        # XXX raising NotFound annoyingly hides the real problem so
        # I've added a bunch of logging.  I propose in the future we
        # change NotFound to something that actually gets logged.  But
        # I'm afraid to do that now because I don't know if we somehow
        # depend on getting a 404 here.
        message = request.get(MAIL_PARAMETER_NAME, None)
        if message is not None:
            message = message_from_string(str(message))
        else:
            logger.error("request.get(%s) returned None" % MAIL_PARAMETER_NAME)
            raise NotFound, _("The message destination cannot be deterimined.")
        # preferentially use the x-original-to header (is this postfix only?),
        # so that mails to multiple lists are handled properly
        address = message.get('x-original-to', None)
        if not address:
            address = message['to']
            cc = message['cc']
            if address and cc:
                address = address + ', ' + cc
            elif cc:
                address = cc
        # normalize case
        if not address:
            import pprint
            logger.warn("No destination found in headers:\n%s" %
                        pprint.pformat(message))
            raise NotFound, _("The message destination cannot be deterimined.")
        address = address.lower()
        if '-manager@' in address:
            address = address.replace('-manager@', '@')
        address_list = AddressList(address)
        for ml_address in address_list:
            ml = self.getListForAddress(ml_address[1])
            if ml is not None:
                break
        else:
            # raise an error on bad requests, so that the SMTP server can
            # send a proper failure message.
            logger.warn("no list found for any of %r" % str(address_list))
            raise ListDoesNotExist, _("The message address does not correspond to a "\
                             "known mailing list.")
        setSite(ml)
        return ml.manage_mailboxer(request)

    def showAddressMapping(self):
        return [{'address': k, 'path': v} for k, v in self._mapping.items()]

    def purgeInvalidEntries(self):
        counter = 0
        for path in self._reverse.keys():
            list_obj = self.unrestrictedTraverse(path, None)
            if list_obj is None:
                address = self._reverse[path]
                del self._mapping[address]
                del self._reverse[path]
                counter += 1
        return counter
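
# Usage sketch (not from the original example above): the lookup utility keeps
# two OOBTrees, `_mapping` (address -> list path) and `_reverse` (path ->
# address), so a list can be unregistered even when only its path is known.
# A minimal, standalone illustration of that forward/reverse pattern, using
# hypothetical addresses and paths:
from BTrees.OOBTree import OOBTree

mapping = OOBTree()   # address -> path
reverse = OOBTree()   # path -> address

def register(address, path):
    address = address.lower()          # addresses are stored lower-cased
    mapping[address] = path
    reverse[path] = address

def unregister(path):
    address = reverse.get(path, None)
    if address is not None:
        del mapping[address]
        del reverse[path]

register('List@Example.org', '/site/lists/example')
assert mapping['list@example.org'] == '/site/lists/example'
unregister('/site/lists/example')
assert 'list@example.org' not in mapping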
Exemple #38
0
class TokenStorage(UniqueObject, SimpleItem, persistent.Persistent):
    isPrincipiaFolderish = True  # Show up in the ZMI
    security = ClassSecurityInfo()
    meta_type = 'TokenStorage'
    id = 'onetimetoken_storage'

    _timedelta = 504  # three weeks

    def __init__(self, request):
        self._tokens = OOBTree()

    def getTokens(self):
        """ Return all usernames and dates without tokens, read only
        """
        return self._tokens.values()

    security.declareProtected(ManageUsers, 'setToken')

    def setToken(self,
                 userId=None,
                 generate_username_callback=None,
                 generate_username_kwargs=None):
        """ Generate token for user or create one-time-user + token
        """
        token = ''
        m_tool = getToolByName(self, 'portal_membership')
        if not userId:
            if generate_username_callback:
                userId = generate_username_callback(
                    **(generate_username_kwargs or {}))
            else:
                userId = self.uniqueString()
            done = m_tool.acl_users.source_users.doAddUser(
                userId, self.uniqueString())
            assert done, "User could not be created for OneTimeToken!"

        expiry = str(self.expirationDate())
        token = self.uniqueString()

        self._tokens[token] = (userId, expiry)
        login = "******" % (userId, token)

        # encode the login string to make it url safe
        token = encodestring(login)

        return token

    security.declarePublic('verifyToken')

    def verifyToken(self, loginCode):
        """
        """
        try:
            userId, token = decodestring(loginCode).split(':')
        except:
            raise TokenError('InvalidLoginCodeError')

        try:
            u, expiry = self._tokens[token]
        except KeyError:
            raise TokenError('InvalidTokenError')

        if self.expired(expiry):
            raise TokenError('ExpiredExpiryError')

        if not u == userId:
            raise TokenError('InvalidUserError')

        del self._tokens[token]

        return u

    security.declarePublic('deleteTemporaryUser')

    def deleteTemporaryUser(self, userId):
        """
        """
        m_tool = getToolByName(self, 'portal_membership')
        return m_tool.acl_users.source_users.doDeleteUser(userId)

    security.declarePrivate('uniqueString')

    def uniqueString(self):
        """Returns a string that is random and unguessable, or at
        least as close as possible."""
        # this is the informal UUID algorithm of
        # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/213761
        # by Carl Free Jr
        t = long(time.time() * 1000)
        r = long(random.random() * 100000000000000000L)
        try:
            a = socket.gethostbyname(socket.gethostname())
        except:
            # if we can't get a network address, just imagine one
            a = random.random() * 100000000000000000L
        data = str(t) + ' ' + str(r) + ' ' + str(a)  #+' '+str(args)
        data = md5.md5(data).hexdigest()
        return str(data)

    security.declarePrivate('expirationDate')

    def expirationDate(self):
        """Returns a DateTime for exipiry of a request from the
        current time.

        This is used by housekeeping methods (like clearEpired)
        and stored in reset request records."""
        if not hasattr(self, '_timedelta'):
            self._timedelta = 168
        try:
            if isinstance(self._timedelta, datetime.timedelta):
                expire = datetime.datetime.utcnow() + self._timedelta
                return DateTime(expire.year, expire.month, expire.day,
                                expire.hour, expire.minute, expire.second,
                                'UTC')
        except NameError:
            pass  # that's okay, it must be a number of hours...
        expire = time.time() + self._timedelta * 3600  # 60 min/hr * 60 sec/min
        return DateTime(expire)

    security.declarePrivate('expired')

    def expired(self, datetime, now=None):
        """Tells whether a DateTime or timestamp 'datetime' is expired
        with regards to either 'now', if provided, or the current
        time."""
        if not now:
            now = DateTime()
        return now.greaterThanEqualTo(datetime)

    security.declarePrivate('clearExpired')

    def clearExpired(self, days=0):
        """Destroys all expired reset request records.
        Parameter 'days' controls how many days past expired it must be to clear token.
        """
        for token, record in self._tokens.items():
            stored_user, expiry = record
            if self.expired(DateTime(expiry), DateTime() - days):
                del self._tokens[token]
                self.deleteTemporaryUser(stored_user)
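
# Usage sketch (not from the original example above): the token flow boils
# down to "store token -> (user, expiry), hand out base64('user:token'), and
# pop the token on the first successful verification".  A minimal standalone
# round trip with a plain dict standing in for the OOBTree; names here are
# illustrative only:
import time
import uuid
import base64

tokens = {}

def set_token(user_id, hours=504):
    token = uuid.uuid4().hex
    tokens[token] = (user_id, time.time() + hours * 3600)
    return base64.b64encode(('%s:%s' % (user_id, token)).encode()).decode()

def verify_token(login_code):
    user_id, token = base64.b64decode(login_code).decode().split(':')
    stored_user, expiry = tokens.pop(token)        # one-time use
    assert stored_user == user_id and expiry > time.time()
    return user_id

code = set_token('jane')
assert verify_token(code) == 'jane'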
Exemple #39
0
class HBTreeFolder2Base (Persistent):
    """Base for BTree-based folders.
    """

    security = ClassSecurityInfo()

    manage_options=(
        ({'label':'Contents', 'action':'manage_main',},
         ) + Folder.manage_options[1:]
        )

    security.declareProtected(view_management_screens,
                              'manage_main')
    manage_main = DTMLFile('contents', globals())

    _htree = None      # OOBTree: { id -> object }
    _count = None     # A BTrees.Length
    _v_nextid = 0     # The integer component of the next generated ID
    title = ''
    _tree_list = None


    def __init__(self, id=None):
        if id is not None:
            self.id = id
        self._initBTrees()

    def _initBTrees(self):
        self._htree = OOBTree()
        self._count = Length()
        self._tree_list = PersistentMapping()

    def _populateFromFolder(self, source):
        """Fill this folder with the contents of another folder.
        """
        for name in source.objectIds():
            value = source._getOb(name, None)
            if value is not None:
                self._setOb(name, aq_base(value))


    security.declareProtected(view_management_screens, 'manage_fixCount')
    def manage_fixCount(self):
        """Calls self._fixCount() and reports the result as text.
        """
        old, new = self._fixCount()
        path = '/'.join(self.getPhysicalPath())
        if old == new:
            return "No count mismatch detected in HBTreeFolder2 at %s." % path
        else:
            return ("Fixed count mismatch in HBTreeFolder2 at %s. "
                    "Count was %d; corrected to %d" % (path, old, new))


    def _fixCount(self):
        """Checks if the value of self._count disagrees with
        len(self.objectIds()). If so, corrects self._count. Returns the
        old and new count values. If old==new, no correction was
        performed.
        """
        old = self._count()
        new = len(self.objectIds())
        if old != new:
            self._count.set(new)
        return old, new


    security.declareProtected(view_management_screens, 'manage_cleanup')
    def manage_cleanup(self):
        """Calls self._cleanup() and reports the result as text.
        """
        v = self._cleanup()
        path = '/'.join(self.getPhysicalPath())
        if v:
            return "No damage detected in HBTreeFolder2 at %s." % path
        else:
            return ("Fixed HBTreeFolder2 at %s.  "
                    "See the log for more details." % path)


    def _cleanup(self):
        """Cleans up errors in the BTrees.

        Certain ZODB bugs have caused BTrees to become slightly insane.
        Fortunately, there is a way to clean up damaged BTrees that
        always seems to work: make a new BTree containing the items()
        of the old one.

        Returns 1 if no damage was detected, or 0 if damage was
        detected and fixed.
        """
        def hCheck(htree):
          """
              Recursively check the btree
          """
          check(htree)
          for key in htree.keys():
              if not htree.has_key(key):
                  raise AssertionError(
                      "Missing value for key: %s" % repr(key))
              else:
                ob = htree[key]
                if isinstance(ob, OOBTree):
                  hCheck(ob)
          return 1
        
        from BTrees.check import check
        path = '/'.join(self.getPhysicalPath())
        try:
            return hCheck(self._htree)
        except AssertionError:            
            LOG('HBTreeFolder2', WARNING,
                'Detected damage to %s. Fixing now.' % path,
                error=sys.exc_info())
            try:
                self._htree = OOBTree(self._htree) # XXX hFix needed
            except:
                LOG('HBTreeFolder2', ERROR, 'Failed to fix %s.' % path,
                    error=sys.exc_info())
                raise
            else:
                LOG('HBTreeFolder2', INFO, 'Fixed %s.' % path)
            return 0

    def hashId(self, id):
        """Return a tuple of ids
        """
        # XXX: why tolerate non-string ids ?
        id_list = str(id).split(H_SEPARATOR)     # We use '-' as the separator by default
        if len(id_list) > 1:
          return tuple(id_list)
        else:
          return [id,]
    
#         try:                             # We then try int hashing
#           id_int = int(id)
#         except ValueError:
#           return id_list
#         result = []
#         while id_int:
#           result.append(id_int % MAX_OBJECT_PER_LEVEL)
#           id_int = id_int / MAX_OBJECT_PER_LEVEL
#         result.reverse()
#         return tuple(result)

    def _getOb(self, id, default=_marker):
        """
            Return the named object from the folder.
        """
        htree = self._htree
        ob = htree
        id_list = self.hashId(id)
        for sub_id in id_list[0:-1]:
          if default is _marker:
            ob = ob[sub_id]
          else:
            ob = ob.get(sub_id, _marker)
            if ob is _marker:
              return default
        if default is _marker:
          ob = ob[id]
        else:
          ob = ob.get(id, _marker)
          if ob is _marker:
            return default
        return ob.__of__(self)

    def _setOb(self, id, object):
        """Store the named object in the folder.
        """
        htree = self._htree
        id_list = self.hashId(id)
        for idx in xrange(len(id_list) - 1):
          sub_id = id_list[idx]
          if not htree.has_key(sub_id):
            # Create a new level
            htree[sub_id] = OOBTree()
            if isinstance(sub_id, (int, long)):
              tree_id = 0
              for id in id_list[:idx+1]:
                  tree_id = tree_id + id * MAX_OBJECT_PER_LEVEL
            else:
              tree_id = H_SEPARATOR.join(id_list[:idx+1])
            # Index newly created level
            self._tree_list[tree_id] = None
            
          htree = htree[sub_id]

        if len(id_list) == 1 and not htree.has_key(None):
            self._tree_list[None] = None
        # set object in subtree            
        ob_id = id_list[-1]
        if htree.has_key(id):
            raise KeyError('There is already an item named "%s".' % id)
        htree[id] = object
        self._count.change(1)

    def _delOb(self, id):
        """Remove the named object from the folder.
        """
        htree = self._htree
        id_list = self.hashId(id)
        for sub_id in id_list[0:-1]:
          htree = htree[sub_id]
        del htree[id]
        self._count.change(-1)

    security.declareProtected(view_management_screens, 'getBatchObjectListing')
    def getBatchObjectListing(self, REQUEST=None):
        """Return a structure for a page template to show the list of objects.
        """
        if REQUEST is None:
            REQUEST = {}
        pref_rows = int(REQUEST.get('dtpref_rows', 20))
        b_start = int(REQUEST.get('b_start', 1))
        b_count = int(REQUEST.get('b_count', 1000))
        b_end = b_start + b_count - 1
        url = self.absolute_url() + '/manage_main'
        count = self.objectCount()

        if b_end < count:
            next_url = url + '?b_start=%d' % (b_start + b_count)
        else:
            b_end = count
            next_url = ''

        if b_start > 1:
            prev_url = url + '?b_start=%d' % max(b_start - b_count, 1)
        else:
            prev_url = ''

        formatted = [listtext0 % pref_rows]
        for optID in islice(self.objectIds(), b_start - 1, b_end):
            optID = escape(optID)
            formatted.append(listtext1 % (escape(optID, quote=1), optID))
        formatted.append(listtext2)
        return {'b_start': b_start, 'b_end': b_end,
                'prev_batch_url': prev_url,
                'next_batch_url': next_url,
                'formatted_list': ''.join(formatted)}


    security.declareProtected(view_management_screens,
                              'manage_object_workspace')
    def manage_object_workspace(self, ids=(), REQUEST=None):
        '''Redirects to the workspace of the first object in
        the list.'''
        if ids and REQUEST is not None:
            REQUEST.RESPONSE.redirect(
                '%s/%s/manage_workspace' % (
                self.absolute_url(), quote(ids[0])))
        else:
            return self.manage_main(self, REQUEST)


    security.declareProtected(access_contents_information,
                              'tpValues')
    def tpValues(self):
        """Ensures the items don't show up in the left pane.
        """
        return ()


    security.declareProtected(access_contents_information,
                              'objectCount')
    def objectCount(self):
        """Returns the number of items in the folder."""
        return self._count()


    security.declareProtected(access_contents_information, 'has_key')
    def has_key(self, id):
        """Indicates whether the folder has an item by ID.
        """
        htree = self._htree
        id_list = self.hashId(id)
        for sub_id in id_list[0:-1]:
          if not isinstance(htree, OOBTree):
            return 0
          if not htree.has_key(sub_id):
            return 0
          htree = htree[sub_id]
        if not htree.has_key(id):
          return 0
        return 1

    # Work around for the performance regression introduced in Zope 2.12.23.
    # Otherwise, we use superclass' __contains__ implementation, which uses
    # objectIds, which is inefficient in HBTreeFolder2 to lookup a single key.
    __contains__ = has_key

    def _htree_iteritems(self, min=None):
        # BUG: Due to bad design of HBTreeFolder2, buckets other than the root
        #      one must not contain both buckets & leaves. Otherwise, this method
        #      fails.
        h = self._htree
        recurse_stack = []
        try:
          for sub_id in min and self.hashId(min) or ('',):
            if recurse_stack:
              i.next()
              if type(h) is not OOBTree:
                break
              id += H_SEPARATOR + sub_id
              if type(h.itervalues().next()) is not OOBTree:
                sub_id = id
            else:
              id = sub_id
            i = h.iteritems(sub_id)
            recurse_stack.append(i)
            h = h[sub_id]
        except (KeyError, StopIteration):
          pass
        while recurse_stack:
          i = recurse_stack.pop()
          try:
            while 1:
              id, h = i.next()
              if type(h) is OOBTree:
                recurse_stack.append(i)
                i = h.iteritems()
              else:
                yield id, h
          except StopIteration:
            pass

    security.declareProtected(access_contents_information,
                              'treeIds')
    def treeIds(self, base_id=None):
        """ Return a list of subtree ids
        """
        tree = self._getTree(base_id=base_id)
        return [k for k, v in self._htree.items() if isinstance(v, OOBTree)]


    def _getTree(self, base_id):
        """ Return the tree wich has the base_id
        """
        htree = self._htree
        id_list = self.hashId(base_id)
        for sub_id in id_list:            
          if not isinstance(htree, OOBTree):
            return None
          if not htree.has_key(sub_id):
            raise IndexError, base_id
          htree = htree[sub_id]
        return htree

    def _getTreeIdList(self, htree=None):
        """ recursively build a list of btree ids
        """
        if htree is None:
          htree = self._htree
        btree_list = []
        for obj_id in htree.keys():
          obj = htree[obj_id]
          if isinstance(obj, OOBTree):
            btree_list.extend(["%s-%s"%(obj_id, x) for x in self._getTreeIdList(htree=obj)])
            btree_list.append(obj_id)

        return btree_list 

    security.declareProtected(access_contents_information,
                              'getTreeIdList')
    def getTreeIdList(self, htree=None):
        """ Return list of all tree ids
        """
        if self._tree_list is None or len(self._tree_list.keys()) == 0:
            tree_list = self._getTreeIdList(htree=htree)
            self._tree_list = PersistentMapping()
            for tree in tree_list:                
                self._tree_list[tree] = None
        return sorted(self._tree_list.keys())

    def _checkObjectId(self, ids):
        """ test id is not in btree id list
        """
        base_id, obj_id = ids
        if base_id is not None:
            obj_id = "%s%s%s" %(base_id, H_SEPARATOR, obj_id)
        return not self._tree_list.has_key(obj_id)
        
    security.declareProtected(access_contents_information,
                              'objectValues')
    def objectValues(self, base_id=_marker):
        return HBTreeObjectValues(self, base_id)

    security.declareProtected(access_contents_information,
                              'objectIds')
    def objectIds(self, base_id=_marker):
        return HBTreeObjectIds(self, base_id)

    security.declareProtected(access_contents_information,
                              'objectItems')
    def objectItems(self, base_id=_marker):
        # Returns a list of (id, subobject) tuples of the current object,
        # optionally restricted to the subtree identified by 'base_id'.
        return HBTreeObjectItems(self, base_id)

    # superValues() looks for the _objects attribute, but the implementation
    # would be inefficient, so superValues() support is disabled.
    _objects = ()


    security.declareProtected(access_contents_information,
                              'objectIds_d')
    def objectIds_d(self, t=None):
        return dict.fromkeys(self.objectIds(t), 1)

    def _checkId(self, id, allow_dup=0):
        if not allow_dup and self.has_key(id):
            raise BadRequestException, ('The id "%s" is invalid--'
                                        'it is already in use.' % id)


    def _setObject(self, id, object, roles=None, user=None, set_owner=1):
        v=self._checkId(id)
        if v is not None: id=v

        # If an object by the given id already exists, remove it.
        if self.has_key(id):
            self._delObject(id)

        self._setOb(id, object)
        object = self._getOb(id)

        if set_owner:
            object.manage_fixupOwnershipAfterAdd()

            # Try to give user the local role "Owner", but only if
            # no local roles have been set on the object yet.
            if hasattr(object, '__ac_local_roles__'):
                if object.__ac_local_roles__ is None:
                    user=getSecurityManager().getUser()
                    if user is not None:
                        userid=user.getId()
                        if userid is not None:
                            object.manage_setLocalRoles(userid, ['Owner'])

        object.manage_afterAdd(object, self)
        return id


    def _delObject(self, id, dp=1):
        object = self._getOb(id)
        try:
            object.manage_beforeDelete(object, self)
        except BeforeDeleteException, ob:
            raise
        except ConflictError:
            raise
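
# Usage sketch (not from the original example above, whose code is truncated
# here): HBTreeFolder2 splits an id on the separator ('-') and walks/creates
# one nested OOBTree per prefix, so one huge flat folder becomes a shallow
# tree of small buckets; the leaf bucket keeps the full id as its key.  A
# standalone illustration of that hashing scheme with hypothetical ids:
from BTrees.OOBTree import OOBTree

H_SEPARATOR = '-'
root = OOBTree()

def set_ob(full_id, value):
    parts = full_id.split(H_SEPARATOR)
    node = root
    for part in parts[:-1]:            # descend, creating buckets as needed
        if part not in node:
            node[part] = OOBTree()
        node = node[part]
    node[full_id] = value              # leaf stores the full id, as above

def get_ob(full_id):
    parts = full_id.split(H_SEPARATOR)
    node = root
    for part in parts[:-1]:
        node = node[part]
    return node[full_id]

set_ob('2020-07-0001', 'first object')
assert get_ob('2020-07-0001') == 'first object'
assert isinstance(root['2020']['07'], OOBTree)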
Exemple #40
0
class UnIndex(SimpleItem):
    """Simple forward and reverse index.
    """

    zmi_icon = 'fas fa-info-circle'
    _counter = None
    operators = ('or', 'and')
    useOperator = 'or'
    query_options = ()

    def __init__(self, id, ignore_ex=None, call_methods=None,
                 extra=None, caller=None):
        """Create an unindex

        UnIndexes are indexes that contain two index components, the
        forward index (like plain index objects) and an inverted
        index.  The inverted index is so that objects can be unindexed
        even when the old value of the object is not known.

        e.g.

        self._index = {datum:[documentId1, documentId2]}
        self._unindex = {documentId:datum}

        The arguments are:

          'id' -- the name of the item attribute to index.  This is
          either an attribute name or a record key.

          'ignore_ex' -- should be set to true if you want the index
          to ignore exceptions raised while indexing instead of
          propagating them.

          'call_methods' -- should be set to true if you want the index
          to call the attribute 'id' (note: 'id' should be callable!)
          You will also need to pass in an object in the index and
          unindex methods for this to work.

          'extra' -- a mapping object that keeps additional
          index-related parameters - subitem 'indexed_attrs'
          can be string with comma separated attribute names or
          a list

          'caller' -- reference to the calling object (usually
          a (Z)Catalog instance)
        """

        def _get(o, k, default):
            """ return a value for a given key of a dict/record 'o' """
            if isinstance(o, dict):
                return o.get(k, default)
            else:
                return getattr(o, k, default)

        self.id = id
        self.ignore_ex = ignore_ex  # currently unimplemented
        self.call_methods = call_methods

        # allow index to index multiple attributes
        ia = _get(extra, 'indexed_attrs', id)
        if isinstance(ia, str):
            self.indexed_attrs = ia.split(',')
        else:
            self.indexed_attrs = list(ia)
        self.indexed_attrs = [
            attr.strip() for attr in self.indexed_attrs if attr]
        if not self.indexed_attrs:
            self.indexed_attrs = [id]

        self.clear()

    def __len__(self):
        return self._length()

    def getId(self):
        return self.id

    def clear(self):
        self._length = Length()
        self._index = OOBTree()
        self._unindex = IOBTree()

        if self._counter is None:
            self._counter = Length()
        else:
            self._increment_counter()

    def __nonzero__(self):
        return not not self._unindex

    def histogram(self):
        """Return a mapping which provides a histogram of the number of
        elements found at each point in the index.
        """
        histogram = {}
        for item in self._index.items():
            if isinstance(item, int):
                entry = 1  # "set" length is 1
            else:
                key, value = item
                entry = len(value)
            histogram[entry] = histogram.get(entry, 0) + 1
        return histogram

    def referencedObjects(self):
        """Generate a list of IDs for which we have referenced objects."""
        return self._unindex.keys()

    def getEntryForObject(self, documentId, default=_marker):
        """Takes a document ID and returns all the information we have
        on that specific object.
        """
        if default is _marker:
            return self._unindex.get(documentId)
        return self._unindex.get(documentId, default)

    def removeForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and remove any reference to documentId
        in its entry in the index.
        """
        indexRow = self._index.get(entry, _marker)
        if indexRow is not _marker:
            try:
                indexRow.remove(documentId)
                if not indexRow:
                    del self._index[entry]
                    self._length.change(-1)
            except ConflictError:
                raise
            except AttributeError:
                # index row is an int
                try:
                    del self._index[entry]
                except KeyError:
                    # swallow KeyError because it was probably
                    # removed and then _length AttributeError raised
                    pass
                if isinstance(self.__len__, Length):
                    self._length = self.__len__
                    del self.__len__
                self._length.change(-1)
            except Exception:
                LOG.error('%(context)s: unindex_object could not remove '
                          'documentId %(doc_id)s from index %(index)r.  This '
                          'should not happen.', dict(
                              context=self.__class__.__name__,
                              doc_id=documentId,
                              index=self.id),
                          exc_info=sys.exc_info())
        else:
            LOG.error('%(context)s: unindex_object tried to '
                      'retrieve set %(entry)r from index %(index)r '
                      'but couldn\'t.  This should not happen.', dict(
                          context=self.__class__.__name__,
                          entry=entry,
                          index=self.id))

    def insertForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and put it in the correct place
        in the forward index.

        This will also deal with creating the entire row if necessary.
        """
        indexRow = self._index.get(entry, _marker)

        # Make sure there's actually a row there already. If not, create
        # a set and stuff it in first.
        if indexRow is _marker:
            # We always use a set to avoid getting conflict errors on
            # multiple threads adding a new row at the same time
            self._index[entry] = IITreeSet((documentId, ))
            self._length.change(1)
        else:
            try:
                indexRow.insert(documentId)
            except AttributeError:
                # Inline migration: index row with one element was an int at
                # first (before Zope 2.13).
                indexRow = IITreeSet((indexRow, documentId))
                self._index[entry] = indexRow

    def index_object(self, documentId, obj, threshold=None):
        """ wrapper to handle indexing of multiple attributes """

        fields = self.getIndexSourceNames()
        res = 0
        for attr in fields:
            res += self._index_object(documentId, obj, threshold, attr)

        if res > 0:
            self._increment_counter()

        return res > 0

    def _index_object(self, documentId, obj, threshold=None, attr=''):
        """ index and object 'obj' with integer id 'documentId'"""
        returnStatus = 0

        # First we need to see if there's anything interesting to look at
        datum = self._get_object_datum(obj, attr)
        if datum is None:
            # Prevent None from being indexed. None doesn't have a valid
            # ordering definition compared to any other object.
            # BTrees 4.0+ will throw a TypeError
            # "object has default comparison" and won't let it be indexed.
            return 0

        # We don't want to do anything that we don't have to here, so we'll
        # check to see if the new and existing information is the same.
        oldDatum = self._unindex.get(documentId, _marker)
        if datum != oldDatum:
            if oldDatum is not _marker:
                self.removeForwardIndexEntry(oldDatum, documentId)
                if datum is _marker:
                    try:
                        del self._unindex[documentId]
                    except ConflictError:
                        raise
                    except Exception:
                        LOG.error('Should not happen: oldDatum was there, '
                                  'now its not, for document: %s', documentId)

            if datum is not _marker:
                self.insertForwardIndexEntry(datum, documentId)
                self._unindex[documentId] = datum

            returnStatus = 1

        return returnStatus

    def _get_object_datum(self, obj, attr):
        # self.id is the name of the index, which is also the name of the
        # attribute we're interested in.  If the attribute is callable,
        # we'll do so.
        try:
            datum = getattr(obj, attr)
            if safe_callable(datum):
                datum = datum()
        except (AttributeError, TypeError):
            datum = _marker
        return datum

    def _increment_counter(self):
        if self._counter is None:
            self._counter = Length()
        self._counter.change(1)

    def getCounter(self):
        """Return a counter which is increased on index changes"""
        return self._counter is not None and self._counter() or 0

    def numObjects(self):
        """Return the number of indexed objects."""
        return len(self._unindex)

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return len(self)

    def unindex_object(self, documentId):
        """ Unindex the object with integer id 'documentId' and don't
        raise an exception if we fail
        """
        unindexRecord = self._unindex.get(documentId, _marker)
        if unindexRecord is _marker:
            return None

        self._increment_counter()

        self.removeForwardIndexEntry(unindexRecord, documentId)
        try:
            del self._unindex[documentId]
        except ConflictError:
            raise
        except Exception:
            LOG.debug('Attempt to unindex nonexistent document'
                      ' with id %s', documentId, exc_info=True)

    def _apply_not(self, not_parm, resultset=None):
        index = self._index
        setlist = []
        for k in not_parm:
            s = index.get(k, None)
            if s is None:
                continue
            elif isinstance(s, int):
                s = IISet((s, ))
            setlist.append(s)
        return multiunion(setlist)

    def _convert(self, value, default=None):
        return value

    def getRequestCache(self):
        """returns dict for caching per request for interim results
        of an index search. Returns 'None' if no REQUEST attribute
        is available"""

        cache = None
        REQUEST = aq_get(self, 'REQUEST', None)
        if REQUEST is not None:
            catalog = aq_parent(aq_parent(aq_inner(self)))
            if catalog is not None:
                # unique catalog identifier
                key = '_catalogcache_{0}_{1}'.format(
                    catalog.getId(), id(catalog))
                cache = REQUEST.get(key, None)
                if cache is None:
                    cache = REQUEST[key] = RequestCache()

        return cache

    def getRequestCacheKey(self, record, resultset=None):
        """returns an unique key of a search record"""
        params = []

        # record operator (or, and)
        params.append(('operator', record.operator))

        # not / exclude operator
        not_value = record.get('not', None)
        if not_value is not None:
            not_value = frozenset(not_value)
            params.append(('not', not_value))

        # record options
        for op in ['range', 'usage']:
            op_value = record.get(op, None)
            if op_value is not None:
                params.append((op, op_value))

        # record keys
        rec_keys = frozenset(record.keys)
        params.append(('keys', rec_keys))

        # build record identifier
        rid = frozenset(params)

        # unique index identifier
        iid = '_{0}_{1}_{2}'.format(self.__class__.__name__,
                                    self.id, self.getCounter())
        return (iid, rid)

    def _apply_index(self, request, resultset=None):
        """Apply the index to query parameters given in the request arg.

        If the query does not match the index, return None, otherwise
        return a tuple of (result, used_attributes), where used_attributes
        is again a tuple with the names of all used data fields.
        """
        record = IndexQuery(request, self.id, self.query_options,
                            self.operators, self.useOperator)
        if record.keys is None:
            return None
        return (self.query_index(record, resultset=resultset), (self.id, ))

    def query_index(self, record, resultset=None):
        """Search the index with the given IndexQuery object.

        If the query has a key which matches the 'id' of
        the index instance, one of a few things can happen:

          - if the value is a string, turn the value into
            a single-element sequence, and proceed.

          - if the value is a sequence, return a union search.

          - If the value is a dict and contains a key of the form
            '<index>_operator' this overrides the default method
            ('or') to combine search results. Valid values are 'or'
            and 'and'.
        """
        index = self._index
        r = None
        opr = None

        # not / exclude parameter
        not_parm = record.get('not', None)

        operator = record.operator

        cachekey = None
        cache = self.getRequestCache()
        if cache is not None:
            cachekey = self.getRequestCacheKey(record)
            if cachekey is not None:
                cached = None
                if operator == 'or':
                    cached = cache.get(cachekey, None)
                else:
                    cached_setlist = cache.get(cachekey, None)
                    if cached_setlist is not None:
                        r = resultset
                        for s in cached_setlist:
                            # the result is bound by the resultset
                            r = intersection(r, s)
                            # If intersection, we can't possibly get a
                            # smaller result
                            if not r:
                                break
                        cached = r

                if cached is not None:
                    if isinstance(cached, int):
                        cached = IISet((cached, ))

                    if not_parm:
                        not_parm = list(map(self._convert, not_parm))
                        exclude = self._apply_not(not_parm, resultset)
                        cached = difference(cached, exclude)

                    return cached

        if not record.keys and not_parm:
            # convert into indexed format
            not_parm = list(map(self._convert, not_parm))
            # we have only a 'not' query
            record.keys = [k for k in index.keys() if k not in not_parm]
        else:
            # convert query arguments into indexed format
            record.keys = list(map(self._convert, record.keys))

        # Range parameter
        range_parm = record.get('range', None)
        if range_parm:
            opr = 'range'
            opr_args = []
            if range_parm.find('min') > -1:
                opr_args.append('min')
            if range_parm.find('max') > -1:
                opr_args.append('max')

        if record.get('usage', None):
            # see if any usage params are sent to field
            opr = record.usage.lower().split(':')
            opr, opr_args = opr[0], opr[1:]

        if opr == 'range':  # range search
            if 'min' in opr_args:
                lo = min(record.keys)
            else:
                lo = None
            if 'max' in opr_args:
                hi = max(record.keys)
            else:
                hi = None
            if hi:
                setlist = index.values(lo, hi)
            else:
                setlist = index.values(lo)

            # If we only use one key, intersect and return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))

                if cachekey is not None:
                    if operator == 'or':
                        cache[cachekey] = result
                    else:
                        cache[cachekey] = [result]

                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result

            if operator == 'or':
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                r = multiunion(tmp)

                if cachekey is not None:
                    cache[cachekey] = r
            else:
                # For intersection, sort with smallest data set first
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                if len(tmp) > 2:
                    setlist = sorted(tmp, key=len)
                else:
                    setlist = tmp

                # 'r' is not invariant of resultset. Thus, we
                # have to remember 'setlist'
                if cachekey is not None:
                    cache[cachekey] = setlist

                r = resultset
                for s in setlist:
                    # the result is bound by the resultset
                    r = intersection(r, s)
                    # If intersection, we can't possibly get a smaller result
                    if not r:
                        break

        else:  # not a range search
            # Filter duplicates
            setlist = []
            for k in record.keys:
                if k is None:
                    # Prevent None from being looked up. None doesn't
                    # have a valid ordering definition compared to any
                    # other object. BTrees 4.0+ will throw a TypeError
                    # "object has default comparison".
                    continue
                try:
                    s = index.get(k, None)
                except TypeError:
                    # key is not valid for this Btree so the value is None
                    LOG.error(
                        '%(context)s: query_index tried '
                        'to look up key %(key)r from index %(index)r '
                        'but key was of the wrong type.', dict(
                            context=self.__class__.__name__,
                            key=k,
                            index=self.id,
                        )
                    )
                    s = None
                # If None, try to bail early
                if s is None:
                    if operator == 'or':
                        # If union, we can possibly get a bigger result
                        continue
                    # If intersection, we can't possibly get a smaller result
                    if cachekey is not None:
                        # If operator is 'and', we have to cache a list of
                        # IISet objects
                        cache[cachekey] = [IISet()]
                    return IISet()
                elif isinstance(s, int):
                    s = IISet((s,))
                setlist.append(s)

            # If we only use one key return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))

                if cachekey is not None:
                    if operator == 'or':
                        cache[cachekey] = result
                    else:
                        cache[cachekey] = [result]

                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result

            if operator == 'or':
                # If we already get a small result set passed in, intersecting
                # the various indexes with it and doing the union later is
                # faster than creating a multiunion first.

                if resultset is not None and len(resultset) < 200:
                    smalllist = []
                    for s in setlist:
                        smalllist.append(intersection(resultset, s))
                    r = multiunion(smalllist)

                    # 'r' is not invariant of resultset.  Thus, we
                    # have to remember the union of 'setlist'. But
                    # this is maybe a performance killer. So we do not cache.
                    # if cachekey is not None:
                    #    cache[cachekey] = multiunion(setlist)

                else:
                    r = multiunion(setlist)
                    if cachekey is not None:
                        cache[cachekey] = r
            else:
                # For intersection, sort with smallest data set first
                if len(setlist) > 2:
                    setlist = sorted(setlist, key=len)

                # 'r' is not invariant of resultset. Thus, we
                # have to remember the union of 'setlist'
                if cachekey is not None:
                    cache[cachekey] = setlist

                r = resultset
                for s in setlist:
                    r = intersection(r, s)
                    # If intersection, we can't possibly get a smaller result
                    if not r:
                        break

        if isinstance(r, int):
            r = IISet((r, ))
        if r is None:
            return IISet()
        if not_parm:
            exclude = self._apply_not(not_parm, resultset)
            r = difference(r, exclude)
        return r

    def hasUniqueValuesFor(self, name):
        """has unique values for column name"""
        if name == self.id:
            return 1
        return 0

    def getIndexSourceNames(self):
        """Return sequence of indexed attributes."""
        return getattr(self, 'indexed_attrs', [self.id])

    def getIndexQueryNames(self):
        """Indicate that this index applies to queries for the index's name."""
        return (self.id,)

    def uniqueValues(self, name=None, withLengths=0):
        """returns the unique values for name

        if withLengths is true, returns a sequence of
        tuples of (value, length)
        """
        if name is None:
            name = self.id
        elif name != self.id:
            return

        if not withLengths:
            for key in self._index.keys():
                yield key
        else:
            for key, value in self._index.items():
                if isinstance(value, int):
                    yield (key, 1)
                else:
                    yield (key, len(value))

    def keyForDocument(self, id):
        # This method is superseded by documentToKeyMap
        return self._unindex[id]

    def documentToKeyMap(self):
        return self._unindex

    def items(self):
        items = []
        for k, v in self._index.items():
            if isinstance(v, int):
                v = IISet((v,))
            items.append((k, v))
        return items
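
# Usage sketch (not from the original example above): UnIndex pairs a forward
# index (value -> set of document ids) with a reverse index (document id ->
# value) so a document can be unindexed without knowing its old value.  A
# minimal standalone version of that bookkeeping, with illustrative data:
from BTrees.OOBTree import OOBTree
from BTrees.IOBTree import IOBTree
from BTrees.IIBTree import IITreeSet

index = OOBTree()      # datum -> IITreeSet of document ids
unindex = IOBTree()    # document id -> datum

def index_doc(doc_id, datum):
    old = unindex.get(doc_id, None)
    if old is not None and old != datum:
        unindex_doc(doc_id)            # drop the stale forward entry first
    row = index.get(datum, None)
    if row is None:
        index[datum] = IITreeSet((doc_id,))
    else:
        row.insert(doc_id)
    unindex[doc_id] = datum

def unindex_doc(doc_id):
    datum = unindex.get(doc_id, None)
    if datum is None:
        return
    row = index.get(datum, None)
    if row is not None and doc_id in row:
        row.remove(doc_id)
        if not row:                    # last document for this value
            del index[datum]
    del unindex[doc_id]

index_doc(1, 'spam')
index_doc(2, 'spam')
index_doc(1, 'eggs')                   # re-indexing doc 1 moves it
assert list(index['spam']) == [2] and unindex[1] == 'eggs'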
Exemple #41
0
class ZODBGroupManager(BasePlugin):
    """ PAS plugin for managing groups, and groups of groups in the ZODB
    """
    meta_type = 'ZODB Group Manager'

    security = ClassSecurityInfo()

    def __init__(self, id, title=None):

        self._id = self.id = id
        self.title = title
        self._groups = OOBTree()
        self._principal_groups = OOBTree()

    #
    #   IGroupEnumerationPlugin implementation
    #
    security.declarePrivate('enumerateGroups')

    def enumerateGroups(self,
                        id=None,
                        title=None,
                        exact_match=False,
                        sort_by=None,
                        max_results=None,
                        **kw):
        """ See IGroupEnumerationPlugin.
        """
        group_info = []
        group_ids = []
        plugin_id = self.getId()

        if isinstance(id, str):
            id = [id]

        if isinstance(title, str):
            title = [title]

        if exact_match and (id or title):

            if id:
                group_ids.extend(id)
            elif title:
                group_ids.extend(title)

        if group_ids:
            group_filter = None

        else:  # Searching
            group_ids = self.listGroupIds()
            group_filter = _ZODBGroupFilter(id, title, **kw)

        for group_id in group_ids:

            if self._groups.get(group_id, None):
                e_url = '%s/manage_groups' % self.getId()
                p_qs = 'group_id=%s' % group_id
                m_qs = 'group_id=%s&assign=1' % group_id

                info = {}
                info.update(self._groups[group_id])

                info['pluginid'] = plugin_id
                info['properties_url'] = '%s?%s' % (e_url, p_qs)
                info['members_url'] = '%s?%s' % (e_url, m_qs)

                info['id'] = '%s%s' % (self.prefix, info['id'])

                if not group_filter or group_filter(info):
                    group_info.append(info)

        return tuple(group_info)

    #
    #   IGroupsPlugin implementation
    #
    security.declarePrivate('getGroupsForPrincipal')

    def getGroupsForPrincipal(self, principal, request=None):
        """ See IGroupsPlugin.
        """
        unadorned = self._principal_groups.get(principal.getId(), ())
        return tuple(['%s%s' % (self.prefix, x) for x in unadorned])

    #
    #   (notional)IZODBGroupManager interface
    #
    security.declareProtected(ManageGroups, 'listGroupIds')

    def listGroupIds(self):
        """ -> ( group_id_1, ... group_id_n )
        """
        return self._groups.keys()

    security.declareProtected(ManageGroups, 'listGroupInfo')

    def listGroupInfo(self):
        """ -> ( {}, ...{} )

        o Return one mapping per group, with the following keys:

          - 'id' 
        """
        return self._groups.values()

    security.declareProtected(ManageGroups, 'getGroupInfo')

    def getGroupInfo(self, group_id):
        """ group_id -> {}
        """
        return self._groups[group_id]

    security.declarePrivate('addGroup')

    def addGroup(self, group_id, title=None, description=None):
        """ Add 'group_id' to the list of groups managed by this object.

        o Raise KeyError on duplicate.
        """
        if self._groups.get(group_id) is not None:
            raise KeyError, 'Duplicate group ID: %s' % group_id

        self._groups[group_id] = {
            'id': group_id,
            'title': title,
            'description': description
        }

    security.declarePrivate('updateGroup')

    def updateGroup(self, group_id, title, description):
        """ Update properties for 'group_id'

        o Raise KeyError if group_id doesn't already exist.
        """
        self._groups[group_id].update({
            'title': title,
            'description': description
        })
        self._groups[group_id] = self._groups[group_id]

    security.declarePrivate('removeGroup')

    def removeGroup(self, group_id):
        """ Remove 'role_id' from the list of roles managed by this
            object, removing assigned members from it before doing so.

        o Raise KeyError if 'group_id' doesn't already exist.
        """
        for principal_id in self._principal_groups.keys():
            self.removePrincipalFromGroup(principal_id, group_id)
        del self._groups[group_id]

    #
    #   Group assignment API
    #
    security.declareProtected(ManageGroups, 'listAvailablePrincipals')

    def listAvailablePrincipals(self, group_id, search_id):
        """ Return a list of principal IDs to that can belong to the group.

        o If supplied, 'search_id' constrains the principal IDs;  if not,
          return empty list.

        o Omit principals with existing assignments.
        """
        result = []

        if search_id:  # don't bother searching if no criteria

            parent = aq_parent(self)

            for info in parent.searchPrincipals(max_results=20,
                                                sort_by='id',
                                                id=search_id,
                                                exact_match=False):
                id = info['id']
                title = info.get('title', id)
                if (group_id not in self._principal_groups.get(id, ())
                        and group_id != id):
                    result.append((id, title))

        return result

    security.declareProtected(ManageGroups, 'listAssignedPrincipals')

    def listAssignedPrincipals(self, group_id):
        """ Return a list of principal IDs belonging to a group.
        """
        result = []

        for k, v in self._principal_groups.items():
            if group_id in v:
                # should be one and only one mapping to 'k'

                parent = aq_parent(self)
                info = parent.searchPrincipals(id=k, exact_match=True)
                assert (len(info) in (0, 1))
                if len(info) == 0:
                    title = '<%s: not found>' % k
                else:
                    title = info[0].get('title', k)
                result.append((k, title))

        return result

    security.declareProtected(ManageGroups, 'addPrincipalToGroup')

    def addPrincipalToGroup(self, principal_id, group_id):
        """ Add a principal to a group.

        o Return a boolean indicating whether a new assignment was created.

        o Raise KeyError if 'group_id' is unknown.
        """
        group_info = self._groups[group_id]  # raise KeyError if unknown!

        current = self._principal_groups.get(principal_id, ())
        already = group_id in current

        if not already:
            new = current + (group_id, )
            self._principal_groups[principal_id] = new

        return not already

    security.declareProtected(ManageGroups, 'removePrincipalFromGroup')

    def removePrincipalFromGroup(self, principal_id, group_id):
        """ Remove a prinicpal from from a group.

        o Return a boolean indicating whether the principal was already 
          a member of the group.

        o Raise KeyError if 'group_id' is unknown.

        o Ignore requests to remove a principal if not already a member
          of the group.
        """
        group_info = self._groups[group_id]  # raise KeyError if unknown!

        current = self._principal_groups.get(principal_id, ())
        new = tuple([x for x in current if x != group_id])
        already = current != new

        if already:
            self._principal_groups[principal_id] = new

        return already

    #
    #   ZMI
    #
    manage_options = (({
        'label': 'Groups',
        'action': 'manage_groups',
    }, ) + BasePlugin.manage_options)

    security.declarePublic('manage_widgets')
    manage_widgets = PageTemplateFile('www/zuWidgets',
                                      globals(),
                                      __name__='manage_widgets')

    security.declareProtected(ManageGroups, 'manage_groups')
    manage_groups = PageTemplateFile('www/zgGroups',
                                     globals(),
                                     __name__='manage_groups')

    security.declareProtected(ManageGroups, 'manage_twoLists')
    manage_twoLists = PageTemplateFile('../www/two_lists',
                                       globals(),
                                       __name__='manage_twoLists')

    security.declareProtected(ManageGroups, 'manage_addGroup')

    def manage_addGroup(self,
                        group_id,
                        title=None,
                        description=None,
                        RESPONSE=None):
        """ Add a group via the ZMI.
        """
        self.addGroup(group_id, title, description)

        message = 'Group+added'

        if RESPONSE is not None:
            RESPONSE.redirect('%s/manage_groups?manage_tabs_message=%s' %
                              (self.absolute_url(), message))

    security.declareProtected(ManageGroups, 'manage_updateGroup')

    def manage_updateGroup(self, group_id, title, description, RESPONSE=None):
        """ Update a group via the ZMI.
        """
        self.updateGroup(group_id, title, description)

        message = 'Group+updated'

        if RESPONSE is not None:
            RESPONSE.redirect('%s/manage_groups?manage_tabs_message=%s' %
                              (self.absolute_url(), message))

    security.declareProtected(ManageGroups, 'manage_removeGroups')

    def manage_removeGroups(self, group_ids, RESPONSE=None):
        """ Remove one or more groups via the ZMI.
        """
        group_ids = filter(None, group_ids)

        if not group_ids:
            message = 'no+groups+selected'

        else:

            for group_id in group_ids:
                self.removeGroup(group_id)

            message = 'Groups+removed'

        if RESPONSE is not None:
            RESPONSE.redirect('%s/manage_groups?manage_tabs_message=%s' %
                              (self.absolute_url(), message))

    security.declareProtected(ManageGroups, 'manage_addPrincipalsToGroup')

    def manage_addPrincipalsToGroup(self,
                                    group_id,
                                    principal_ids,
                                    RESPONSE=None):
        """ Add one or more principals to a group via the ZMI.
        """
        assigned = []

        for principal_id in principal_ids:
            if self.addPrincipalToGroup(principal_id, group_id):
                assigned.append(principal_id)

        if not assigned:
            message = 'Principals+already+members+of+%s' % group_id
        else:
            message = '%s+added+to+%s' % ('+'.join(assigned), group_id)

        if RESPONSE is not None:
            RESPONSE.redirect(('%s/manage_groups?group_id=%s&assign=1' +
                               '&manage_tabs_message=%s') %
                              (self.absolute_url(), group_id, message))

    security.declareProtected(ManageGroups, 'manage_removePrincipalsFromGroup')

    def manage_removePrincipalsFromGroup(self,
                                         group_id,
                                         principal_ids,
                                         RESPONSE=None):
        """ Remove one or more principals from a group via the ZMI.
        """
        removed = []

        for principal_id in principal_ids:
            if self.removePrincipalFromGroup(principal_id, group_id):
                removed.append(principal_id)

        if not removed:
            message = 'Principals+not+in+group+%s' % group_id
        else:
            message = 'Principals+%s+removed+from+%s' % ('+'.join(removed),
                                                         group_id)

        if RESPONSE is not None:
            RESPONSE.redirect(('%s/manage_groups?group_id=%s&assign=1' +
                               '&manage_tabs_message=%s') %
                              (self.absolute_url(), group_id, message))
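
# Usage sketch (not from the original example above): the group manager keeps
# group metadata in `_groups` (group id -> info mapping) and memberships in
# `_principal_groups` (principal id -> tuple of group ids), replacing the
# whole tuple on change so the persistent BTree registers the update.  A
# standalone version of that assignment logic, with illustrative names:
from BTrees.OOBTree import OOBTree

groups = OOBTree()             # group id -> {'id': ..., 'title': ...}
principal_groups = OOBTree()   # principal id -> tuple of group ids

def add_group(group_id, title=None):
    if groups.get(group_id) is not None:
        raise KeyError('Duplicate group ID: %s' % group_id)
    groups[group_id] = {'id': group_id, 'title': title}

def add_principal_to_group(principal_id, group_id):
    groups[group_id]                       # KeyError if the group is unknown
    current = principal_groups.get(principal_id, ())
    if group_id in current:
        return False                       # no new assignment created
    principal_groups[principal_id] = current + (group_id,)
    return True

add_group('editors', title='Editors')
assert add_principal_to_group('jane', 'editors') is True
assert add_principal_to_group('jane', 'editors') is False
assert principal_groups['jane'] == ('editors',)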
class LanguageIndex(SimpleItem, PropertyManager):

    _properties = (dict(id='fallback', type='boolean', mode='w'), )

    meta_type = 'LanguageIndex'

    manage_options = PropertyManager.manage_options + (dict(
        label='Histogram', action='manage_histogram'), )

    security = ClassSecurityInfo()

    security.declareProtected(Permissions.manage_zcatalog_indexes,
                              'manage_histogram')
    manage_histogram = PageTemplateFile('www/indexHistogram.pt',
                                        GLOBALS,
                                        __name__='manage_histogram')

    query_options = ('query', 'fallback')
    fallback = True

    def __init__(self, id, fallback=True, extra=None, caller=None):
        self.id = id
        # 'extra' is used by the twisted ZCatalog addIndex machinery
        self.fallback = extra and extra.fallback or fallback
        self.clear()

    # IPluggableIndex implementation

    security.declarePrivate('getEntryForObject')

    def getEntryForObject(self, documentId, default=None):
        """Return the documentId entry"""
        return self._unindex.get(documentId, default)

    security.declarePrivate('getIndexSourceNames')

    def getIndexSourceNames(self):
        """The attributes we index"""
        # Not configurable; only GS uses this
        return None

    security.declarePrivate('index_object')

    def index_object(self, documentId, obj, threshold=None):
        """Index the object"""
        if not ITranslatable.providedBy(obj):
            if IIndexableObjectWrapper.providedBy(obj):
                # wrapped object in `plone.indexer`
                wrapped = getattr(obj, '_IndexableObjectWrapper__object', None)
                # XXX: the rest can probably go now...
                # Wrapper doesn't proxy __implements__
                if wrapped is None:
                    wrapped = getattr(obj, '_IndexableObjectWrapper__ob', None)
                # Older CMFPlone
                if wrapped is None:
                    wrapped = getattr(obj, '_obj', None)
                if wrapped is None:
                    return 0
                obj = wrapped

        try:
            language = obj.Language
            if callable(language):
                language = language()
        except AttributeError:
            return 0

        if ITranslatable.providedBy(obj):
            canonical = obj.getCanonical()
            # Gracefully deal with broken references
            if canonical is None:
                return 0
            cid = canonical.UID()
        else:
            # Also index non-translatable content, otherwise
            # LinguaPlone only shows translatable content.
            # This assumes a catalog documentId will never
            # be equal to a UID.
            cid = documentId

        if documentId not in self._unindex:
            self._length.change(1)
        else:
            self._remove(self._unindex[documentId])

        main, sub = splitLanguage(language)
        entry = IndexEntry(documentId, main, sub, cid)
        self._insert(entry)
        self._unindex[documentId] = entry
        self._sortindex[documentId] = str(entry)

        return 1

    security.declarePrivate('unindex_object')

    def unindex_object(self, documentId):
        """Remove indexed information"""
        entry = self._unindex.get(documentId, None)

        if entry is None:
            LOG.debug('Attempt to unindex document with id %s failed' %
                      documentId)
            return

        self._remove(entry)

        self._length.change(-1)
        del self._unindex[documentId]
        del self._sortindex[documentId]

    security.declarePrivate('_apply_index')

    def _apply_index(self, request, cid=''):
        """Apply the index to the search parameters given in request"""

        record = parseIndexRequest(request, self.id, self.query_options)
        if record.keys is None:
            return None

        result = None
        fallback = self.fallback
        if hasattr(record, 'fallback'):
            fallback = bool(record.fallback)

        for language in record.keys:
            rows = self._search(language, fallback)
            result = ii_union(result, rows)

        return (result or IISet()), ('Language', )

    security.declareProtected(Permissions.manage_zcatalog_indexes,
                              'numObjects')

    def numObjects(self):
        """Return the number of indexed objects"""
        return len(self)

    indexSize = numObjects

    security.declareProtected(Permissions.manage_zcatalog_indexes, 'clear')

    def clear(self):
        """Clear the index"""
        self._index = OOBTree()
        self._unindex = IOBTree()
        self._sortindex = IOBTree()
        self._length = Length()

    # IUniqueValueIndex implementation

    security.declarePrivate('hasUniqueValuesFor')

    def hasUniqueValuesFor(self, name):
        """Return true if the index can return the unique values for name"""
        # Never actually used anywhere in the Zope and Plone codebases.
        return name == self.id

    security.declareProtected(Permissions.manage_zcatalog_indexes,
                              'uniqueValues')

    def uniqueValues(self, name=None, withLengths=False):
        """Return the unique values for name.

        If 'withLengths' is true, returns a sequence of tuples of
        (value, length).

        """
        if name is not None and name != self.id:
            # Never actually used anywhere in the Zope and Plone codebases.
            return ()

        def makeTag(main, sub):
            return '-'.join(filter(None, (main, sub)))

        if withLengths:
            return tuple((makeTag(m, s), len(entries))
                         for (m, subs) in self._index.items()
                         for (s, entries) in subs.items())
        else:
            return tuple(
                makeTag(m, s) for (m, subs) in self._index.items()
                for s in subs.keys())

    # ISortIndex implementation

    security.declarePrivate('keyForDocument')

    def keyForDocument(self, documentId):
        """Deprecated"""
        return self._sortindex[documentId]

    security.declarePrivate('documentToKeyMap')

    def documentToKeyMap(self):
        """Map id to language tag"""
        return self._sortindex

    # Internal operations

    security.declarePrivate('__len__')

    def __len__(self):
        return self._length()

    security.declarePrivate('_insert')

    def _insert(self, entry):
        if entry.main not in self._index:
            self._index[entry.main] = OOBTree()
        if entry.sub not in self._index[entry.main]:
            self._index[entry.main][entry.sub] = OOTreeSet()

        self._index[entry.main][entry.sub].insert(entry)

    security.declarePrivate('_remove')

    def _remove(self, entry):
        main = self._index.get(entry.main, _marker)
        if main is _marker:
            return

        # XXX I get many spurious errors on trying to remove the entry here,
        # which is strange. If the entry exists in _unindex, it should
        # be in _index[entry.main][entry.sub] as well.
        # I've put a test around it now, but this might hide a deeper problem.
        # //regebro
        if entry in self._index[entry.main][entry.sub]:
            self._index[entry.main][entry.sub].remove(entry)
        else:
            LOG.warning("entry %s existed in _unindex "
                        "but not in _index." % str(entry))

        if not self._index[entry.main][entry.sub]:
            del self._index[entry.main][entry.sub]
        if not self._index[entry.main]:
            del self._index[entry.main]

    security.declarePrivate('_search')

    def _search(self, language, fallback=True):
        main, sub = splitLanguage(language)

        if main not in self._index:
            return None

        if fallback:
            # Search in sorted order, specific sub tag first, None second
            subs = list(self._index[main].keys())
            subs.sort()
            if sub in subs:
                subs.remove(sub)
                subs.insert(0, sub)
        else:
            subs = [sub]

        result = OOSet()

        for sublanguage in subs:
            result = oo_union(result, self._index[main][sublanguage])

        return IISet(entry.docid for entry in result)
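A rough query sketch, assuming the index is registered in a ZCatalog under the id 'Language' (the catalog variable and language codes below are illustrative, not taken from the listing):

# query_options = ('query', 'fallback') means a catalog query may pass a
# per-query fallback flag alongside the language code:
results = catalog(Language={'query': 'de-at', 'fallback': True})
# With fallback enabled, _search('de-at') unions in every sub tag stored
# under the 'de' main tag (including plain 'de'); with fallback=False only
# the exact 'at' sub tag is consulted.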
Exemple #43
0
class IndexIR(object):
    """Vocabulary and Inverted Index Data Structures for Information Retrieval"""
    
    def __init__(self, top_dir="", index_on_memory=False, skip_pointers=False, d_gaps=False):
        self._vocabulary = OOBTree()
        self._parent_dir = join(top_dir,INDEXIR_DIR)
        if (not exists(self._parent_dir)):
            mkdir(self._parent_dir)
        self._index_fpath = join(self._parent_dir, INDEXIR_INV)
        if (not exists(self._index_fpath)):
            # If no index file exists yet, create one in memory
            self._index_buffer = BytesIO(b"")
            self._in_memory = True
        else:
            # If it exists, use the index already stored on disk
            if (index_on_memory):
                self._load_index()
            self._in_memory = index_on_memory
        self._vocab_fpath = join(self._parent_dir, VOCABULARY)
        self._config_fpath = join(self._parent_dir, CONFIG_DIR)
        self._int_sz = calcsize(f"I")
        self._plist_item_sz = self._int_sz * 2
        self._config = {
            "SKIP_POINTERS": skip_pointers
            "DGAPS": d_gaps
        }
        if (self.index_exists()):
            self._vocabulary = self._load_vocab()
            self._config = self._load_conf()


    def _pack_plist(self, plist):
        if (self._config['DGAPS']):
            plist = self._delta_encode(plist)
        data = [e for entries in plist for e in entries]
        return pack(f"{len(data)}I", *data)


    def _calculate_len_plist(self, len_packed_data):
        l = len_packed_data
        a = 4
        b = -4*(1+l)-1 
        c = (1+l)**2
        # Lists built with skip_pointers follow this length function
        f = lambda x: 2*x + sqrt(x) - 1
        s1 = ((-1*b) + sqrt(b**2 - 4*a*c))/(2*a)
        s2 = ((-1*b) - sqrt(b**2 - 4*a*c))/(2*a)
        if (f(s1) == l):
            return ceil(s1)
        elif (f(s2) == l):
            return ceil(s2)


    def _unpack_plist(self, plist):
        is_skip = lambda i, l: i % (l//int(sqrt(l) - 1)) == 0 and (i + (l//int(sqrt(l) - 1))) < l
        data = unpack(f"{len(plist)//self._int_sz}I", plist)
        res = []
        if (self._config['SKIP_POINTERS']):
            i = 0
            len_d = self._calculate_len_plist(len(data))
            j = 0
            while i < len(data)-1:
                entry = []
                entry.append(data[i])
                entry.append(data[i+1])
                if (is_skip(j, len_d)):
                    entry.append(data[i+2])
                    i += 1
                i += 2
                j += 1
                res.append(entry)
        else:
            res = [(data[i],data[i+1]) for i in range(0,len(data),2)]
        if (self._config['DGAPS']):
            res = self._delta_decode(res)
        return res


    def _add_skip_pointers(self):
        if (exists(self._index_fpath)):
            raise Exception(f"An index alredy exists. Delete the folder {self._index_fpath}")
        prev_skip = lambda i, l: (i - (l//int(sqrt(l) - 1)))
        is_prev_skip = lambda i, l: (i % (l//int(sqrt(l) - 1)) == 0 and i != 0) if int(sqrt(l) - 1) > 0 else False
        if (self._config['SKIP_POINTERS']):
            for term in self._vocabulary:
                l = len(self._vocabulary[term]['plist'])
                for i in range(len(self._vocabulary[term]['plist'])):
                    if (is_prev_skip(i, l)):
                        pi = prev_skip(i, l)
                        print(f"Adding pointer to {pi} with value {i}")
                        doc_id = self._vocabulary[term]['plist'][pi][0]
                        tf = self._vocabulary[term]['plist'][pi][1]
                        self._vocabulary[term]['plist'][pi] = (doc_id, tf, i)


    def add_entry(self, term, id_doc):
        if (not term in self._vocabulary):
            self._vocabulary[term] = {"df": 0, "tf": 0, "plist": []}
        plist = self._vocabulary[term]['plist']
        tup = (id_doc, 1)
        indx = None
        for i in range(len(plist)):
            if (plist[i][0] == id_doc):
                indx = i
                break
        if (indx is not None):
            tup = (plist[indx][0], plist[indx][1]+1)
            self._vocabulary[term]['plist'][indx] = tup
        else:
            self._vocabulary[term]['plist'].append(tup)
            self._vocabulary[term]['df'] += 1
        self._vocabulary[term]['tf'] += 1


    def flush_index(self):
        for term in self._vocabulary:
            packed_tups = self._pack_plist(self._vocabulary[term]['plist'])
            self._index_buffer.seek(0, 2) # SEEK_END
            self._vocabulary[term]['offset'] = self._index_buffer.tell()
            self._index_buffer.write(packed_tups)


    def get_plist(self, term):
        if (term in self._vocabulary):
            if (self._in_memory):
                self._index_buffer.seek(self._vocabulary[term]['offset'])
                if (self._config['SKIP_POINTERS']):
                    packed_list = self._index_buffer.read(self._vocabulary[term]['df'] * self._plist_item_sz + self._int_sz * int(sqrt(self._vocabulary[term]['df']) - 1))
                else:
                    packed_list = self._index_buffer.read(self._vocabulary[term]['df'] * self._plist_item_sz)
                return self._unpack_plist(packed_list)
            else:
                with open(self._index_fpath, "rb") as f:
                    f.seek(self._vocabulary[term]['offset'])
                    if (self._config['SKIP_POINTERS']):
                        packed_list = f.read(self._vocabulary[term]['df'] * self._plist_item_sz + self._int_sz * int(sqrt(self._vocabulary[term]['df']) - 1))
                    else:
                        packed_list = f.read(self._vocabulary[term]['df'] * self._plist_item_sz)
                    return self._unpack_plist(packed_list)
        else:
            return []


    def get_plist_intersect(self, t1, t2):
        result = { term: [] for term in [t1,t2] }
        p1 = self.get_plist(t1)
        p2 = self.get_plist(t2)
        i1 = 0
        i2 = 0
        has_skip = lambda i, l: len(l[i]) == 3
        next_skip = lambda i, l: l[i][2]
        while (i1 < len(p1) and i2 < len(p2)):
            # Standard merge intersection: matching doc ids are collected;
            # otherwise skip pointers (3-tuples) let the lagging list jump ahead.
            if p1[i1][0] == p2[i2][0]:
                result[t1] += [p1[i1]]
                result[t2] += [p2[i2]]
                i1 += 1
                i2 += 1
            elif p1[i1][0] < p2[i2][0]:
                f = False
                while has_skip(i1,p1):
                    if p1[next_skip(i1,p1)][0] < p2[i2][0]:
                        i1 = next_skip(i1,p1)
                        f = True
                    else:
                        if (not f):
                            i1 += 1
                        break
                else:  
                    i1 += 1
            else:
                f = False
                while has_skip(i2,p2):
                    if p2[next_skip(i2,p2)][0] < p1[i1][0]:
                        i2 = next_skip(i2,p2)
                        f = True
                    else:
                        if (not f):
                            i2 += 1
                        break
                else:  
                    i2 += 1
        return result.values()


    def _load_conf(self):
        with open(self._config_fpath, 'r') as myfile:
            return load(myfile)

    
    def _dump_conf(self):
        with open(self._config_fpath, 'w') as f:
            dump(self._config, f, indent=4, ensure_ascii=False)


    def _load_vocab(self):
        with open(self._vocab_fpath, 'r') as myfile:
            return load(myfile)


    def _dump_vocab(self):
        sorted_data = sorted(self._vocabulary.items(), key=lambda kv: kv[0])
        data = { i[0]: { "df": i[1]["df"], "tf": i[1]["tf"], "offset": i[1]["offset"] } for i in sorted_data}
        with open(self._vocab_fpath, 'w') as f:
            dump(data, f, indent=None, separators=(',',':'), ensure_ascii=False)


    def _load_index(self):
        with open(self._index_fpath, "rb") as f:
            self._index_buffer = BytesIO(b"")
            self._index_buffer.write(f.read())


    def _dump_index(self):
        with open(self._index_fpath, "wb") as f:
            f.write(self._index_buffer.getvalue())


    def indexing_ready(self):
        self._add_skip_pointers()
        self.flush_index()
        self._dump_vocab()
        self._dump_index()
        self._dump_conf()

    
    def index_exists(self):
        return isfile(self._index_fpath) and isfile(self._vocab_fpath) and isfile(self._config_fpath)
                

    def dump_json(self, to_file, indent=4):
        sorted_data = sorted(self._vocabulary.items(), key=lambda kv: kv[0])
        data = { i[0]: i[1] for i in sorted_data}
        with open(to_file, 'w') as f:
            dump(data, f, indent=indent, ensure_ascii=False)


    def _delta_encode(self, data):
        res = [data[0]]
        prev_doc_id = data[0][0]
        for i in range(1, len(data)):
            doc_id = data[i][0]
            tf = data[i][1]
            if len(data[i]) == 3:
                sk = data[i][2]
                res += [(doc_id - prev_doc_id, tf, sk)]
            else:
                res += [(doc_id - prev_doc_id, tf)]
            prev_doc_id = doc_id
        return res


    def _delta_decode(self, data):
        res = [data[0]]
        prev_doc_id = data[0][0]
        for i in range(1, len(data)):
            delta = data[i][0]
            doc_id = delta + prev_doc_id
            tf = data[i][1]
            if len(data[i]) == 3:
                sk = data[i][2]
                res += [(doc_id, tf, sk)]
            else:
                res += [(doc_id, tf)]
            prev_doc_id = doc_id
        return res
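A small end-to-end sketch of the intended build/flush/query cycle. It assumes the module-level constants the class relies on (INDEXIR_DIR, INDEXIR_INV, VOCABULARY, CONFIG_DIR) are defined; the documents and terms are made up:

index = IndexIR(top_dir="/tmp/ir_demo", d_gaps=True)
for doc_id, text in [(1, "to be or not to be"), (2, "to index is to live")]:
    for term in text.split():
        index.add_entry(term, doc_id)
index.indexing_ready()        # packs postings, writes vocabulary, index and config
# get_plist() returns (doc_id, term_frequency) pairs, delta-decoded if DGAPS is on;
# here "to" is expected to yield [(1, 2), (2, 2)].
print(index.get_plist("to"))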
Exemple #44
0
class Folder(Persistent):
    """ A folder implementation which acts much like a Python dictionary.

    keys are Unicode strings; values are arbitrary Python objects.
    """

    # _num_objects=None below is b/w compat for older instances of
    # folders which don't have a BTrees.Length object as a
    # _num_objects attribute.
    _num_objects = None

    __name__ = None
    __parent__ = None

    # Default uses ordering of underlying BTree.
    _order = None

    def _get_order(self):
        if self._order is not None:
            return list(self._order)
        return self.data.keys()

    def _set_order(self, value):
        # XXX:  should we test against self.data.keys()?
        self._order = tuple([unicodify(x) for x in value])

    def _del_order(self):
        del self._order

    order = property(_get_order, _set_order, _del_order)

    def __init__(self, data=None):
        if data is None:
            data = {}
        self.data = OOBTree(data)
        self._num_objects = Length(len(data))

    def keys(self):
        """ See IFolder.
        """
        return self.order

    def __iter__(self):
        return iter(self.order)

    def values(self):
        """ See IFolder.
        """
        if self._order is not None:
            return [self.data[name] for name in self.order]
        return self.data.values()

    def items(self):
        """ See IFolder.
        """
        if self._order is not None:
            return [(name, self.data[name]) for name in self.order]
        return self.data.items()

    def __len__(self):
        """ See IFolder.
        """
        if self._num_objects is None:
            # can be arbitrarily expensive
            return len(self.data)
        return self._num_objects()

    def __nonzero__(self):
        """ See IFolder.
        """
        return True

    __bool__ = __nonzero__

    def __getitem__(self, name):
        """ See IFolder.
        """
        name = unicodify(name)
        return self.data[name]

    def get(self, name, default=None):
        """ See IFolder.
        """
        name = unicodify(name)
        return self.data.get(name, default)

    def __contains__(self, name):
        """ See IFolder.
        """
        name = unicodify(name)
        return self.data.has_key(name)

    def __setitem__(self, name, other):
        """ See IFolder.
        """
        return self.add(name, other)

    def add(self, name, other, send_events=True):
        """ See IFolder.
        """
        if not isinstance(name, string_types):
            raise TypeError("Name must be a string rather than a %s" %
                            name.__class__.__name__)
        if not name:
            raise TypeError("Name must not be empty")

        name = unicodify(name)

        if self.data.has_key(name):
            raise KeyError('An object named %s already exists' % name)

        if send_events:
            objectEventNotify(ObjectWillBeAddedEvent(other, self, name))
        other.__parent__ = self
        other.__name__ = name

        # backwards compatibility: add a Length _num_objects to folders that
        # have none
        if self._num_objects is None:
            self._num_objects = Length(len(self.data))

        self.data[name] = other
        self._num_objects.change(1)

        if self._order is not None:
            self._order += (name, )

        if send_events:
            objectEventNotify(ObjectAddedEvent(other, self, name))

    def __delitem__(self, name):
        """ See IFolder.
        """
        return self.remove(name)

    def remove(self, name, send_events=True):
        """ See IFolder.
        """
        name = unicodify(name)
        other = self.data[name]

        if send_events:
            objectEventNotify(ObjectWillBeRemovedEvent(other, self, name))

        if hasattr(other, '__parent__'):
            del other.__parent__

        if hasattr(other, '__name__'):
            del other.__name__

        # backwards compatibility: add a Length _num_objects to folders that
        # have none
        if self._num_objects is None:
            self._num_objects = Length(len(self.data))

        del self.data[name]
        self._num_objects.change(-1)

        if self._order is not None:
            self._order = tuple([x for x in self._order if x != name])

        if send_events:
            objectEventNotify(ObjectRemovedEvent(other, self, name))

        return other

    def pop(self, name, default=marker):
        """ See IFolder.
        """
        try:
            result = self.remove(name)
        except KeyError:
            if default is marker:
                raise
            return default
        return result

    def __repr__(self):
        klass = self.__class__
        classname = '%s.%s' % (klass.__module__, klass.__name__)
        return '<%s object %r at %#x>' % (classname, self.__name__, id(self))
Exemple #45
0
class UnIndex(Persistent, Implicit, SimpleItem):
    """UnIndex object interface"""


    def __init__(self, id, ignore_ex=None, call_methods=None):
        """Create an unindex

        UnIndexes are indexes that contain two index components, the
        forward index (like plain index objects) and an inverted
        index.  The inverted index is so that objects can be unindexed
        even when the old value of the object is not known.

        e.g.

        self._index = {datum:[documentId1, documentId2]}
        self._unindex = {documentId:datum}

        If any item in self._index has a length-one value, the value is an
        integer, and not a set.  There are special cases in the code to deal
        with this.

        The arguments are:

          'id' -- the name of the item attribute to index.  This is
          either an attribute name or a record key.

          'ignore_ex' -- should be set to true if you want the index
          to ignore exceptions raised while indexing instead of
          propagating them.

          'call_methods' -- should be set to true if you want the index
          to call the attribute 'id' (note: 'id' should be callable!)
          You will also need to pass in an object in the index and
          unindex methods for this to work.

        """

        self.id = id
        self.ignore_ex=ignore_ex        # currently unimplemented
        self.call_methods=call_methods

        # experimental code for specifying the operator
        self.operators = ['or','and']
        self.useOperator = 'or'

        self.__len__=BTrees.Length.Length() # see __len__ method docstring
        self.clear()
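        # Illustration (not from the original code): after indexing two
        # documents whose indexed value is 'blue', the two components hold
        #   self._index   ~ {'blue': IITreeSet([12, 13])}
        #   self._unindex ~ {12: 'blue', 13: 'blue'}
        # A row with a single document may be stored as a bare int instead
        # of an IITreeSet -- the special case the docstring warns about.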

    def getId(self): return self.id

    def clear(self):
        # inplace opportunistic conversion from old-style to new style BTrees
        try: self.__len__.set(0)
        except AttributeError: self.__len__=BTrees.Length.Length()
        self._index = OOBTree()
        self._unindex = IOBTree()

    def _convertBTrees(self, threshold=200):
        if type(self._index) is OOBTree: return

        from BTrees.convert import convert

        _index=self._index
        self._index=OOBTree()

        def convertSet(s,
                       IITreeSet=IITreeSet, IntType=type(0),
                       type=type, len=len,
                       doneTypes = (IntType, IITreeSet)):

            if type(s) in doneTypes: return s

            if len(s) == 1:
                try: return s[0]  # convert to int
                except: pass # This is just an optimization.

            return IITreeSet(s)

        convert(_index, self._index, threshold, convertSet)

        _unindex=self._unindex
        self._unindex=IOBTree()
        convert(_unindex, self._unindex, threshold)

        self.__len__=BTrees.Length.Length(len(_index))

    def __nonzero__(self):
        return not not self._unindex

    def __len__(self):
        """Return the number of objects indexed.

        This method is only called for indexes which have "old" BTrees,
        and the *only* reason that UnIndexes maintain a __len__ is for
        the searching code in the catalog during sorting.
        """
        return len(self._unindex)

    def histogram(self):
        """Return a mapping which provides a histogram of the number of
        elements found at each point in the index."""

        histogram = {}
        for item in self._index.items():
            if type(item) is IntType:
                entry = 1 # "set" length is 1
            else:
                key, value = item
                entry = len(value)
            histogram[entry] = histogram.get(entry, 0) + 1

        return histogram


    def referencedObjects(self):
        """Generate a list of IDs for which we have referenced objects."""
        return self._unindex.keys()


    def getEntryForObject(self, documentId, default=_marker):
        """Takes a document ID and returns all the information we have
        on that specific object."""
        if default is _marker:
            return self._unindex.get(documentId)
        else:
            return self._unindex.get(documentId, default)


    def removeForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and remove any reference to documentId
        in its entry in the index."""
        global _marker
        indexRow = self._index.get(entry, _marker)
        if indexRow is not _marker:
            try:
                indexRow.remove(documentId)
                if not indexRow:
                    del self._index[entry]
                    try: self.__len__.change(-1)
                    except AttributeError: pass # pre-BTrees-module instance
            except AttributeError:
                # index row is an int
                del self._index[entry]
                try: self.__len__.change(-1)
                except AttributeError: pass # pre-BTrees-module instance
            except:
                LOG(self.__class__.__name__, ERROR,
                    ('unindex_object could not remove '
                     'documentId %s from index %s.  This '
                     'should not happen.'
                     % (str(documentId), str(self.id))), '',
                    sys.exc_info())
        else:
            LOG(self.__class__.__name__, ERROR,
                ('unindex_object tried to retrieve set %s '
                 'from index %s but couldn\'t.  This '
                 'should not happen.' % (repr(entry), str(self.id))))


    def insertForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and put it in the correct place
        in the forward index.

        This will also deal with creating the entire row if necessary."""
        global _marker
        indexRow = self._index.get(entry, _marker)

        # Make sure there's actually a row there already.  If not, create
        # an IntSet and stuff it in first.
        if indexRow is _marker:
            self._index[entry] = documentId
            try:  self.__len__.change(1)
            except AttributeError: pass # pre-BTrees-module instance
        else:
            try: indexRow.insert(documentId)
            except AttributeError:
                # index row is an int
                indexRow=IITreeSet((indexRow, documentId))
                self._index[entry] = indexRow

    def index_object(self, documentId, obj, threshold=None):
        """ index and object 'obj' with integer id 'documentId'"""
        global _marker
        returnStatus = 0

        # First we need to see if there's anything interesting to look at
        datum = self._get_object_datum(obj)

        # We don't want to do anything that we don't have to here, so we'll
        # check to see if the new and existing information is the same.
        oldDatum = self._unindex.get(documentId, _marker)
        if datum != oldDatum:
            if oldDatum is not _marker:
                self.removeForwardIndexEntry(oldDatum, documentId)
                if datum is _marker:
                    try:
                        del self._unindex[documentId]
                    except:
                        LOG('UnIndex', ERROR,
                            'Should not happen: oldDatum was there, now it is not, '
                            'for document with id %s' % documentId)

            if datum is not _marker:
                self.insertForwardIndexEntry(datum, documentId)
                self._unindex[documentId] = datum

            returnStatus = 1

        return returnStatus

    def _get_object_datum(self,obj):
        # self.id is the name of the index, which is also the name of the
        # attribute we're interested in.  If the attribute is callable,
        # we'll do so.
        try:
            datum = getattr(obj, self.id)
            if callable(datum):
                datum = datum()
        except AttributeError:
            datum = _marker
        return datum

    def numObjects(self):
        """ return number of indexed objects """
        return len(self._unindex)


    def unindex_object(self, documentId):
        """ Unindex the object with integer id 'documentId' and don't
        raise an exception if we fail """

        global _marker
        unindexRecord = self._unindex.get(documentId, _marker)
        if unindexRecord is _marker:
            return None

        self.removeForwardIndexEntry(unindexRecord, documentId)

        try:
            del self._unindex[documentId]
        except:
            LOG('UnIndex', ERROR, 'Attempt to unindex nonexistent document'
                ' with id %s' % documentId)

    def _apply_index(self, request, cid='', type=type, None=None):
Exemple #46
0
class DataBucketStream(Document):
    """
  Represents data stored in many small files inside a "stream".
  Each file is "addressed" by its key similar to dict.
  """

    meta_type = 'ERP5 Data Bucket Stream'
    portal_type = 'Data Bucket Stream'
    add_permission = Permissions.AddPortalContent

    # Declarative security
    security = ClassSecurityInfo()
    security.declareObjectProtected(Permissions.AccessContentsInformation)

    # Declarative properties
    property_sheets = (PropertySheet.CategoryCore, PropertySheet.SortIndex)

    def __init__(self, id, **kw):
        self.initBucketTree()
        self.initIndexTree()
        Document.__init__(self, id, **kw)

    def __len__(self):
        return len(self._tree)

    def initBucketTree(self):
        """
      Initialize the Bucket Tree
    """
        self._tree = OOBTree()

    def initIndexTree(self):
        """
      Initialize the Index Tree
    """
        self._long_index_tree = LOBTree()

    def getMaxKey(self, key=None):
        """
    Return the maximum key
    """
        try:
            return self._tree.maxKey(key)
        except ValueError:
            return None

    def getMaxIndex(self, index=None):
        """
    Return the maximum index
    """
        try:
            return self._long_index_tree.maxKey(index)
        except ValueError:
            return None

    def getMinKey(self, key=None):
        """
    Return the minimum key
    """
        try:
            return self._tree.minKey(key)
        except ValueError:
            return None

    def getMinIndex(self, index=None):
        """
    Return the minimum index
    """
        try:
            return self._long_index_tree.minKey(index)
        except ValueError:
            return None

    def _getOb(self, id, *args, **kw):
        return None

    def getBucketByKey(self, key=None):
        """
      Get one bucket
    """
        return self._tree[key].value

    def getBucketByIndex(self, index=None):
        """
      Get one bucket
    """
        key = self._long_index_tree[index]
        return self.getBucketByKey(key)  # getBucketByKey() already returns the raw value

    def getBucket(self, key):
        log('DeprecationWarning: Please use getBucketByKey')
        return self.getBucketByKey(key)

    def hasBucketKey(self, key):
        """
      Whether a bucket with this key exists
    """
        return key in self._tree

    def hasBucketIndex(self, index):
        """
      Whether a bucket with this index exists
    """
        return self._long_index_tree.has_key(index)

    def insertBucket(self, key, value):
        """
      Insert one bucket
    """
        try:
            count = self._long_index_tree.maxKey() + 1
        except ValueError:
            count = 0
        except AttributeError:
            pass
        try:
            self._long_index_tree.insert(count, key)
        except AttributeError:
            pass
        value = PersistentString(value)
        is_new_key = self._tree.insert(key, value)
        if not is_new_key:
            self.log("Reingestion of same key")
            self._tree[key] = value

    def getBucketKeySequenceByKey(self,
                                  start_key=None,
                                  stop_key=None,
                                  count=None,
                                  exclude_start_key=False,
                                  exclude_stop_key=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._tree.keys(min=start_key,
                                   max=stop_key,
                                   excludemin=exclude_start_key,
                                   excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequenceByIndex(self,
                                    start_index=None,
                                    stop_index=None,
                                    count=None,
                                    exclude_start_index=False,
                                    exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketKeySequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketKeySequenceByKey')
        return self.getBucketKeySequenceByKey(start_key=start_key, count=count)

    def getBucketIndexKeySequenceByIndex(self,
                                         start_index=None,
                                         stop_index=None,
                                         count=None,
                                         exclude_start_index=False,
                                         exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeySequence(self, sequence)

    def getBucketIndexSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
      Get a lazy sequence of bucket keys
    """
        sequence = self._long_index_tree.keys(min=start_index,
                                              max=stop_index,
                                              excludemin=exclude_start_index,
                                              excludemax=exclude_stop_index)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByKey(self,
                                    start_key=None,
                                    stop_key=None,
                                    count=None,
                                    exclude_start_key=False,
                                    exclude_stop_key=False):
        """
      Get a lazy sequence of bucket values
    """
        sequence = self._tree.values(min=start_key,
                                     max=stop_key,
                                     excludemin=exclude_start_key,
                                     excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketValueSequenceByIndex(self,
                                      start_index=None,
                                      stop_index=None,
                                      count=None,
                                      exclude_start_index=False,
                                      exclude_stop_index=False):
        """
      Get a lazy sequence of bucket values
    """
        sequence = self._long_index_tree.values(min=start_index,
                                                max=stop_index,
                                                excludemin=exclude_start_index,
                                                excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexValueSequence(self, sequence)

    def getBucketValueSequence(self, start_key=None, count=None):
        log('DeprecationWarning: Please use getBucketValueSequenceByKey')
        return self.getBucketValueSequenceByKey(start_key=start_key,
                                                count=count)

    def getBucketKeyItemSequenceByKey(self,
                                      start_key=None,
                                      stop_key=None,
                                      count=None,
                                      exclude_start_key=False,
                                      exclude_stop_key=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._tree.items(min=start_key,
                                    max=stop_key,
                                    excludemin=exclude_start_key,
                                    excludemax=exclude_stop_key)
        if count is None:
            return sequence
        return sequence[:count]

    def getBucketItemSequence(self,
                              start_key=None,
                              count=None,
                              exclude_start_key=False):
        log('DeprecationWarning: Please use getBucketKeyItemSequenceByKey')
        return self.getBucketKeyItemSequenceByKey(
            start_key=start_key,
            count=count,
            exclude_start_key=exclude_start_key)

    def getBucketIndexItemSequenceByIndex(self,
                                          start_index=None,
                                          stop_index=None,
                                          count=None,
                                          exclude_start_index=False,
                                          exclude_stop_index=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexItemSequence(self, sequence)

    def getBucketIndexKeyItemSequenceByIndex(self,
                                             start_index=None,
                                             stop_index=None,
                                             count=None,
                                             exclude_start_index=False,
                                             exclude_stop_index=False):
        """
      Get a lazy sequence of bucket items
    """
        sequence = self._long_index_tree.items(min=start_index,
                                               max=stop_index,
                                               excludemin=exclude_start_index,
                                               excludemax=exclude_stop_index)
        if count is not None:
            sequence = sequence[:count]
        return IndexKeyItemSequence(self, sequence)

    def getItemList(self):
        """
      Return a list of all key, value pairs
    """
        return [item for item in self._tree.items()]

    def getKeyList(self):
        """
      Return a list of all keys
    """
        return [key for key in self._tree.keys()]

    def getIndexList(self):
        """
      Return a list of all indexes
    """
        return [key for key in self._long_index_tree.keys()]

    def getIndexKeyTupleList(self):
        """
      Return a list of all indexes
    """
        return [key for key in self._long_index_tree.items()]

    def getMd5sum(self, key):
        """
      Get hexdigest of bucket.
    """
        h = hashlib.md5()
        h.update(self.getBucketByKey(key))
        return h.hexdigest()

    def delBucketByKey(self, key):
        """
      Remove the bucket.
    """
        del self._tree[key]
        for index, my_key in list(self.getBucketIndexKeySequenceByIndex()):
            if my_key == key:
                del self._long_index_tree[index]

    def delBucketByIndex(self, index):
        """
      Remove the bucket.
    """
        key = self._long_index_tree[index]
        del self._tree[key]
        del self._long_index_tree[index]

    def rebuildIndexTreeByKeyOrder(self):
        """
        Clear and rebuild the index tree by order of keys
    """
        self.initIndexTree()
        for count, key in enumerate(self.getBucketKeySequenceByKey()):
            self._long_index_tree.insert(count, key)
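A sketch of the bucket API, assuming an ERP5 site where the 'Data Bucket Stream' portal type is installed (the module id, keys and payloads are illustrative):

stream = portal.data_stream_module.newContent(portal_type='Data Bucket Stream')
stream.insertBucket('2024-01-01/part-0000', 'first chunk')
stream.insertBucket('2024-01-01/part-0001', 'second chunk')
print(stream.getBucketByKey('2024-01-01/part-0000'))   # 'first chunk'
# Insertion order is tracked in a separate LOBTree, so buckets can also be
# read back by integer index:
print(stream.getBucketByIndex(1))                      # 'second chunk'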
Exemple #47
0
class BTreeFolder2Base (Persistent):
    """Base for BTree-based folders.
    """

    security = ClassSecurityInfo()

    manage_options=(
        ({'label':'Contents', 'action':'manage_main',},
         ) + Folder.manage_options[1:]
        )

    security.declareProtected(view_management_screens,
                              'manage_main')
    manage_main = DTMLFile('contents', globals())

    _tree = None      # OOBTree: { id -> object }
    _count = None     # A BTrees.Length
    _v_nextid = 0     # The integer component of the next generated ID
    _mt_index = None  # OOBTree: { meta_type -> OIBTree: { id -> 1 } }
    title = ''


    def __init__(self, id=None):
        if id is not None:
            self.id = id
        self._initBTrees()

    def _initBTrees(self):
        self._tree = OOBTree()
        self._count = Length()
        self._mt_index = OOBTree()


    def _populateFromFolder(self, source):
        """Fill this folder with the contents of another folder.
        """
        for name in source.objectIds():
            value = source._getOb(name, None)
            if value is not None:
                self._setOb(name, aq_base(value))


    security.declareProtected(view_management_screens, 'manage_fixCount')
    def manage_fixCount(self):
        """Calls self._fixCount() and reports the result as text.
        """
        old, new = self._fixCount()
        path = '/'.join(self.getPhysicalPath())
        if old == new:
            return "No count mismatch detected in BTreeFolder2 at %s." % path
        else:
            return ("Fixed count mismatch in BTreeFolder2 at %s. "
                    "Count was %d; corrected to %d" % (path, old, new))


    def _fixCount(self):
        """Checks if the value of self._count disagrees with
        len(self.objectIds()). If so, corrects self._count. Returns the
        old and new count values. If old==new, no correction was
        performed.
        """
        old = self._count()
        new = len(self.objectIds())
        if old != new:
            self._count.set(new)
        return old, new


    security.declareProtected(view_management_screens, 'manage_cleanup')
    def manage_cleanup(self):
        """Calls self._cleanup() and reports the result as text.
        """
        v = self._cleanup()
        path = '/'.join(self.getPhysicalPath())
        if v:
            return "No damage detected in BTreeFolder2 at %s." % path
        else:
            return ("Fixed BTreeFolder2 at %s.  "
                    "See the log for more details." % path)


    def _cleanup(self):
        """Cleans up errors in the BTrees.

        Certain ZODB bugs have caused BTrees to become slightly insane.
        Fortunately, there is a way to clean up damaged BTrees that
        always seems to work: make a new BTree containing the items()
        of the old one.

        Returns 1 if no damage was detected, or 0 if damage was
        detected and fixed.
        """
        from BTrees.check import check
        path = '/'.join(self.getPhysicalPath())
        try:
            check(self._tree)
            for key in self._tree.keys():
                if not self._tree.has_key(key):
                    raise AssertionError(
                        "Missing value for key: %s" % repr(key))
            check(self._mt_index)
            for key, value in self._mt_index.items():
                if (not self._mt_index.has_key(key)
                    or self._mt_index[key] is not value):
                    raise AssertionError(
                        "Missing or incorrect meta_type index: %s"
                        % repr(key))
                check(value)
                for k in value.keys():
                    if not value.has_key(k):
                        raise AssertionError(
                            "Missing values for meta_type index: %s"
                            % repr(key))
            return 1
        except AssertionError:
            LOG.warn( 'Detected damage to %s. Fixing now.' % path,
                exc_info=True)
            try:
                self._tree = OOBTree(self._tree)
                mt_index = OOBTree()
                for key, value in self._mt_index.items():
                    mt_index[key] = OIBTree(value)
                self._mt_index = mt_index
            except:
                LOG.error('Failed to fix %s.' % path,
                    exc_info=True)
                raise
            else:
                LOG.info('Fixed %s.' % path)
            return 0


    def _getOb(self, id, default=_marker):
        """Return the named object from the folder.
        """
        tree = self._tree
        if default is _marker:
            ob = tree[id]
            return ob.__of__(self)
        else:
            ob = tree.get(id, _marker)
            if ob is _marker:
                return default
            else:
                return ob.__of__(self)


    def _setOb(self, id, object):
        """Store the named object in the folder.
        """
        tree = self._tree
        if tree.has_key(id):
            raise KeyError('There is already an item named "%s".' % id)
        tree[id] = object
        self._count.change(1)
        # Update the meta type index.
        mti = self._mt_index
        meta_type = getattr(object, 'meta_type', None)
        if meta_type is not None:
            ids = mti.get(meta_type, None)
            if ids is None:
                ids = OIBTree()
                mti[meta_type] = ids
            ids[id] = 1


    def _delOb(self, id):
        """Remove the named object from the folder.
        """
        tree = self._tree
        meta_type = getattr(tree[id], 'meta_type', None)
        del tree[id]
        self._count.change(-1)
        # Update the meta type index.
        if meta_type is not None:
            mti = self._mt_index
            ids = mti.get(meta_type, None)
            if ids is not None and ids.has_key(id):
                del ids[id]
                if not ids:
                    # Removed the last object of this meta_type.
                    # Prune the index.
                    del mti[meta_type]


    security.declareProtected(view_management_screens, 'getBatchObjectListing')
    def getBatchObjectListing(self, REQUEST=None):
        """Return a structure for a page template to show the list of objects.
        """
        if REQUEST is None:
            REQUEST = {}
        pref_rows = int(REQUEST.get('dtpref_rows', 20))
        b_start = int(REQUEST.get('b_start', 1))
        b_count = int(REQUEST.get('b_count', 1000))
        b_end = b_start + b_count - 1
        url = self.absolute_url() + '/manage_main'
        idlist = self.objectIds()  # Pre-sorted.
        count = self.objectCount()

        if b_end < count:
            next_url = url + '?b_start=%d' % (b_start + b_count)
        else:
            b_end = count
            next_url = ''

        if b_start > 1:
            prev_url = url + '?b_start=%d' % max(b_start - b_count, 1)
        else:
            prev_url = ''

        formatted = []
        formatted.append(listtext0 % pref_rows)
        for i in range(b_start - 1, b_end):
            optID = escape(idlist[i])
            formatted.append(listtext1 % (escape(optID, quote=1), optID))
        formatted.append(listtext2)
        return {'b_start': b_start, 'b_end': b_end,
                'prev_batch_url': prev_url,
                'next_batch_url': next_url,
                'formatted_list': ''.join(formatted)}


    security.declareProtected(view_management_screens,
                              'manage_object_workspace')
    def manage_object_workspace(self, ids=(), REQUEST=None):
        '''Redirects to the workspace of the first object in
        the list.'''
        if ids and REQUEST is not None:
            REQUEST.RESPONSE.redirect(
                '%s/%s/manage_workspace' % (
                self.absolute_url(), quote(ids[0])))
        else:
            return self.manage_main(self, REQUEST)


    security.declareProtected(access_contents_information,
                              'tpValues')
    def tpValues(self):
        """Ensures the items don't show up in the left pane.
        """
        return ()


    security.declareProtected(access_contents_information,
                              'objectCount')
    def objectCount(self):
        """Returns the number of items in the folder."""
        return self._count()


    security.declareProtected(access_contents_information, 'has_key')
    def has_key(self, id):
        """Indicates whether the folder has an item by ID.
        """
        return self._tree.has_key(id)


    security.declareProtected(access_contents_information,
                              'objectIds')
    def objectIds(self, spec=None):
        # Returns a list of subobject ids of the current object.
        # If 'spec' is specified, returns objects whose meta_type
        # matches 'spec'.
        if spec is not None:
            if isinstance(spec, StringType):
                spec = [spec]
            mti = self._mt_index
            set = None
            for meta_type in spec:
                ids = mti.get(meta_type, None)
                if ids is not None:
                    set = union(set, ids)
            if set is None:
                return ()
            else:
                return set.keys()
        else:
            return self._tree.keys()


    security.declareProtected(access_contents_information,
                              'objectValues')
    def objectValues(self, spec=None):
        # Returns a list of actual subobjects of the current object.
        # If 'spec' is specified, returns only objects whose meta_type
        # match 'spec'.
        return LazyMap(self._getOb, self.objectIds(spec))


    security.declareProtected(access_contents_information,
                              'objectItems')
    def objectItems(self, spec=None):
        # Returns a list of (id, subobject) tuples of the current object.
        # If 'spec' is specified, returns only objects whose meta_type match
        # 'spec'
        return LazyMap(lambda id, _getOb=self._getOb: (id, _getOb(id)),
                       self.objectIds(spec))


    security.declareProtected(access_contents_information,
                              'objectMap')
    def objectMap(self):
        # Returns a tuple of mappings containing subobject meta-data.
        return LazyMap(lambda (k, v):
                       {'id': k, 'meta_type': getattr(v, 'meta_type', None)},
                       self._tree.items(), self._count())

    # superValues() looks for the _objects attribute, but the implementation
    # would be inefficient, so superValues() support is disabled.
    _objects = ()


    security.declareProtected(access_contents_information,
                              'objectIds_d')
    def objectIds_d(self, t=None):
        ids = self.objectIds(t)
        res = {}
        for id in ids:
            res[id] = 1
        return res


    security.declareProtected(access_contents_information,
                              'objectMap_d')
    def objectMap_d(self, t=None):
        return self.objectMap()


    def _checkId(self, id, allow_dup=0):
        if not allow_dup and self.has_key(id):
            raise BadRequestException, ('The id "%s" is invalid--'
                                        'it is already in use.' % id)


    def _setObject(self, id, object, roles=None, user=None, set_owner=1,
                   suppress_events=False):
        ob = object # better name, keep original function signature
        v = self._checkId(id)
        if v is not None:
            id = v

        # If an object by the given id already exists, remove it.
        if self.has_key(id):
            self._delObject(id)

        if not suppress_events:
            notify(ObjectWillBeAddedEvent(ob, self, id))

        self._setOb(id, ob)
        ob = self._getOb(id)

        if set_owner:
            # TODO: eventify manage_fixupOwnershipAfterAdd
            # This will be called for a copy/clone, or a normal _setObject.
            ob.manage_fixupOwnershipAfterAdd()

            # Try to give user the local role "Owner", but only if
            # no local roles have been set on the object yet.
            if getattr(ob, '__ac_local_roles__', _marker) is None:
                user = getSecurityManager().getUser()
                if user is not None:
                    userid = user.getId()
                    if userid is not None:
                        ob.manage_setLocalRoles(userid, ['Owner'])

        if not suppress_events:
            notify(ObjectAddedEvent(ob, self, id))
            notifyContainerModified(self)

        OFS.subscribers.compatibilityCall('manage_afterAdd', ob, ob, self)

        return id


    def _delObject(self, id, dp=1, suppress_events=False):
        ob = self._getOb(id)

        OFS.subscribers.compatibilityCall('manage_beforeDelete', ob, ob, self)

        if not suppress_events:
            notify(ObjectWillBeRemovedEvent(ob, self, id))

        self._delOb(id)

        if not suppress_events:
            notify(ObjectRemovedEvent(ob, self, id))
            notifyContainerModified(self)


    # Aliases for mapping-like access.
    __len__ = objectCount
    keys = objectIds
    values = objectValues
    items = objectItems

    # backward compatibility
    hasObject = has_key

    security.declareProtected(access_contents_information, 'get')
    def get(self, name, default=None):
        return self._getOb(name, default)


    # Utility for generating unique IDs.

    security.declareProtected(access_contents_information, 'generateId')
    def generateId(self, prefix='item', suffix='', rand_ceiling=999999999):
        """Returns an ID not used yet by this folder.

        The ID is unlikely to collide with other threads and clients.
        The IDs are sequential to optimize access to objects
        that are likely to have some relation.
        """
        tree = self._tree
        n = self._v_nextid
        attempt = 0
        while 1:
            if n % 4000 != 0 and n <= rand_ceiling:
                id = '%s%d%s' % (prefix, n, suffix)
                if not tree.has_key(id):
                    break
            n = randint(1, rand_ceiling)
            attempt = attempt + 1
            if attempt > MAX_UNIQUEID_ATTEMPTS:
                # Prevent denial of service
                raise ExhaustedUniqueIdsError
        self._v_nextid = n + 1
        return id

    def __getattr__(self, name):
        # Boo hoo hoo!  Zope 2 prefers implicit acquisition over traversal
        # to subitems, and __bobo_traverse__ hooks don't work with
        # restrictedTraverse() unless __getattr__() is also present.
        # Oh well.
        res = self._tree.get(name)
        if res is None:
            raise AttributeError, name
        return res
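
The sequential-plus-random strategy in generateId() above can be exercised against a bare OOBTree. A minimal sketch, assuming a module-level counter in place of the folder's volatile _v_nextid and a made-up MAX_UNIQUEID_ATTEMPTS constant:

from random import randint
from BTrees.OOBTree import OOBTree

MAX_UNIQUEID_ATTEMPTS = 1000   # assumed limit; the real constant is defined elsewhere
_next_id = 1                   # stands in for the folder's volatile _v_nextid

def generate_id(tree, prefix='item', suffix='', rand_ceiling=999999999):
    global _next_id
    n = _next_id
    attempt = 0
    while 1:
        if n % 4000 != 0 and n <= rand_ceiling:
            new_id = '%s%d%s' % (prefix, n, suffix)
            if new_id not in tree:
                break
        n = randint(1, rand_ceiling)
        attempt += 1
        if attempt > MAX_UNIQUEID_ATTEMPTS:
            raise RuntimeError('exhausted unique ids')
    _next_id = n + 1
    return new_id

tree = OOBTree()
tree[generate_id(tree)] = 'first'    # 'item1'
tree[generate_id(tree)] = 'second'   # 'item2'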
Exemple #48
0
    def CreateIndex(self, fileName, indexNo, attrType, attrLength):
        # method excerpt: 'Tree' and 'json' are assumed to be imported/defined elsewhere
        with open(fileName + "." + str(indexNo), "w", encoding="utf-8") as f:
            tree = Tree()
            tree_dict = dict(tree.items())
            json.dump(tree_dict, f)
        return self
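
A self-contained sketch of the same idea using the BTrees package directly; the keys, values and file name here are made up for illustration:

import json
from BTrees.OOBTree import OOBTree

tree = OOBTree()
tree['alice'] = 1
tree['bob'] = 2

# an OOBTree is not JSON-serialisable directly, so convert to a plain dict first
with open('people.0.json', 'w', encoding='utf-8') as f:   # illustrative file name
    json.dump(dict(tree.items()), f)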
Exemple #49
0
class LookupTable(base.Base):
    "LookupTable class"

    meta_type = "LookupTable"
    security = ClassSecurityInfo()
    records = None
    recordsLength = None

    drawDict = base.Base.drawDict.copy()
    drawDict['drawTable'] = 'drawTable'

    security.declareProtected('View management screens', 'edit')
    def edit(self, *args, **kw):
        "Inline edit short object"
        format = "<p>Currently there are %s records</p><div>%s</div>"
        if self.records is None:
            self.records = OOBTree()
        lenRecords = self.recordsLength() if self.recordsLength is not None else 0
        return format % (lenRecords, self.create_button('clear', "Clear"))

    security.declarePrivate('processRecorderChanges')
    def processRecorderChanges(self, form):
        "process the recorder changes"
        clear = form.pop('clear', None)
        if clear is not None:
            self.clear()

    security.declarePrivate('before_manage_edit')
    def before_manage_edit(self, form):
        "process the edits"
        self.processRecorderChanges(form)

    security.declareProtected('View management screens', "drawTable")
    def drawTable(self):
        "Render page"
        temp = []
        format = '<p>%s:%s</p>'
        if self.records is not None:
            for key,value in self.records.items():
                temp.append(format % (repr(key), repr(value)))
        return ''.join(temp)

    security.declareProtected('Python Record Modification', 'insert')
    def insert(self, key, value):
        "modify this key and value into the OOBTree"
        if self.records is None:
            self.records = OOBTree()
        if self.recordsLength is None:
            self.setObject('recordsLength', BTrees.Length.Length())
        
        if key not in self.records:
            self.recordsLength.change(1)
        self.records.insert(key,value)

    security.declareProtected('Python Record Modification', 'add')
    def add(self, key, value):
        "this this key and value into the OOBTree"
        if self.records is None:
            self.records = OOBTree()
        if self.recordsLength is None:
            self.setObject('recordsLength', BTrees.Length.Length())
        
        if key not in self.records:
            self.recordsLength.change(1)
        self.records[key] = value

    security.declareProtected('Python Record Access', 'items')
    def items(self, min=None, max=None):
        "return the items in this OOBTree"
        if self.records is None:
            return []
        return self.records.items(min, max)

    security.declareProtected('Python Record Access', 'values')
    def values(self, min=None, max=None):
        "return the values of this OOBTree"
        if self.records is None:
            return []
        return self.records.values(min, max)

    security.declareProtected('Python Record Modification', 'update')
    def update(self, collection):
        "update our OOBTree with the data in collection"
        if self.records is None:
            self.records = OOBTree()
        if self.recordsLength is None:
            self.setObject('recordsLength', BTrees.Length.Length())
        
        records = self.records
        change = self.recordsLength.change
        for key,value in collection.items():
            if key not in records:
                change(1)
            records[key] = value

    security.declareProtected('Python Record Access', 'keys')
    def keys(self, min=None, max=None):
        "return the keys of this OOBTree"
        if self.records is None:
            return []
        return self.records.keys(min,max)

    security.declareProtected('Python Record Modification', '__delitem__')
    def __delitem__(self, key):
        "delete this key from the OOBTree"
        if self.records is not None:
            del self.records[key]
            self.recordsLength.change(-1)

    security.declareProtected('Python Record Modification', 'remove')
    def remove(self, key):
        "delete this key from the OOBTree"
        if self.records is not None:
            del self.records[key]
            self.recordsLength.change(-1)

    security.declareProtected('Python Record Modification', '__setitem__')
    def __setitem__(self, key, value):
        "set this key and value in the OOBTree"
        if self.records is None:
            self.records = OOBTree()
        self.records[key] = value

    security.declareProtected('Python Record Access', '__getitem__')
    def __getitem__(self, index):
        "get this item from the OOBTree"
        if self.records is not None:
            return self.records[index]
        raise KeyError, index

    security.declareProtected('Python Record Access', 'get')
    def get(self, key, default=None):
        "get this item from the OOBTree"
        if self.records is not None:
            return self.records.get(key,default)
        return default

    security.declareProtected('Python Record Access', 'has_key')
    def has_key(self, key):
        "see if we have this key in the OOBTree"
        if self.records is not None:
            return self.records.has_key(key)
        return False

    security.declareProtected('Python Record Modification', 'clear')
    def clear(self):
        "clear the OOBTree"
        self.setObject('records', None)
        self.setObject('recordsLength', None)
        
    security.declarePrivate("PrincipiaSearchSource")
    def PrincipiaSearchSource(self):
        "This is the basic search function"
        return ''
      
    security.declarePrivate('classUpgrader')
    def classUpgrader(self):
        "upgrade this class"
        self.createBTreeLength() 
      
    security.declarePrivate('createBTreeLength')
    def createBTreeLength(self):
        "remove Filters that are not being used"
        if self.records is not None:
            length = BTrees.Length.Length()
            length.set(len(self.records))
            self.setObject('recordsLength', length)
    createBTreeLength = utility.upgradeLimit(createBTreeLength, 165)
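
The insert/add/update methods above keep an OOBTree and a BTrees.Length counter in sync; a minimal standalone sketch of that bookkeeping (setObject is specific to this codebase, so plain names are used here):

from BTrees.Length import Length
from BTrees.OOBTree import OOBTree

records = OOBTree()
records_length = Length()          # conflict-resolving counter, cheaper than len(tree)

def add(key, value):
    if key not in records:
        records_length.change(1)   # only count genuinely new keys
    records[key] = value

add('colour', 'blue')
add('colour', 'green')             # overwrite: the counter is not incremented
add('size', 'large')
print(records_length())            # -> 2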
Exemple #50
0
def group(self, seq):
    sortIndex = self._sortIndex
    sortReverse = self._sortReverse
    ns = len(seq)
    ni = len(sortIndex)

    if ns >= 0.1 * ni:
        # result large compared to index -- sort via index
        handled = IISet()
        hn = 0
        _load = getattr(sortIndex, '_load', None)
        if _load is None:
            # not an optimized index
            items = sortIndex.items()
            _load = lambda (x1, x2): x2
            if sortReverse:
                items.reverse()
        elif sortReverse:
            gRO = getattr(sortIndex, 'getReverseOrder', None)
            items = gRO and gRO()
            if items is None:
                items = list(sortIndex._index.keys())
                items.reverse()
        else:
            items = sortIndex._index.keys()

        for i in items:
            ids = intersection(seq, _load(i))
            if ids:
                handled.update(ids)
                hn += len(ids)
                yield i, ids
        if hn != len(seq):
            yield None, difference(seq, handled)
    else:
        # result relatively small -- sort via result
        m = OOBTree()
        keyFor = getattr(sortIndex, 'keyForDocument', None)
        # work around "nogopip" bug: it defines "keyForDocument" as an integer
        if not callable(keyFor):
            # this will fail, when the index neither defines a reasonable
            # "keyForDocument" nor "documentToKeyMap". In this case,
            # the index cannot be used for sorting.
            keyFor = lambda doc, map=sortIndex.documentToKeyMap(): map[doc]
        noValue = IITreeSet()

        for doc in seq.keys():
            try:
                k = keyFor(doc)
            except KeyError:
                noValue.insert(doc)
                continue

            k = NaturalObjectCompare(k)
            l = m.get(k)
            if l is None: l = m[k] = IITreeSet()
            l.insert(doc)
        items = m.items()
        if sortReverse:
            items = list(items)
            items.reverse()

        for i in items:
            yield i
        if noValue: yield None, noValue
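
The "sort via result" branch above buckets document ids by their sort key. A condensed, runnable sketch in which a plain dict stands in for the index's keyForDocument mapping:

from BTrees.IIBTree import IISet, IITreeSet
from BTrees.OOBTree import OOBTree

seq = IISet((1, 2, 3, 4))               # candidate document ids
key_for = {1: 'b', 2: 'a', 3: 'b'}      # docid -> sort key; 4 has no value

buckets = OOBTree()
no_value = IITreeSet()
for doc in seq.keys():
    try:
        k = key_for[doc]
    except KeyError:
        no_value.insert(doc)
        continue
    bucket = buckets.get(k)
    if bucket is None:
        bucket = buckets[k] = IITreeSet()
    bucket.insert(doc)

for k, bucket in buckets.items():
    print(k, list(bucket))              # a [2]  /  b [1, 3]
if no_value:
    print(None, list(no_value))         # None [4]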
Exemple #51
0
class TopicIndex(Persistent, SimpleItem):

    """A TopicIndex maintains a set of FilteredSet objects.

    Every FilteredSet object consists of an expression and an IISet holding
    the ids of all indexed objects for which that expression evaluates to 1.
    """
    implements(ITopicIndex, IPluggableIndex)

    meta_type="TopicIndex"
    query_options = ('query', 'operator')

    manage_options= (
        {'label': 'FilteredSets', 'action': 'manage_main'},
    )

    def __init__(self,id,caller=None):
        self.id = id
        self.filteredSets  = OOBTree()
        self.operators = ('or','and')
        self.defaultOperator = 'or'

    def getId(self):
        return self.id

    def clear(self):
        for fs in self.filteredSets.values():
            fs.clear()

    def index_object(self, docid, obj ,threshold=100):
        """ hook for (Z)Catalog """
        for fid, filteredSet in self.filteredSets.items():
            filteredSet.index_object(docid,obj)
        return 1

    def unindex_object(self,docid):
        """ hook for (Z)Catalog """

        for fs in self.filteredSets.values():
            try:
                fs.unindex_object(docid)
            except KeyError:
                LOG.debug('Attempt to unindex document'
                          ' with id %s failed' % docid)
        return 1

    def numObjects(self):
        """Return the number of indexed objects."""
        return "n/a"

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return "n/a"

    def search(self,filter_id):
        if self.filteredSets.has_key(filter_id):
            return self.filteredSets[filter_id].getIds()

    def _apply_index(self, request):
        """ hook for (Z)Catalog
            'request' --  mapping type (usually {"topic": "..." }
        """
        record = parseIndexRequest(request, self.id, self.query_options)
        if record.keys is None:
            return None

        operator = record.get('operator', self.defaultOperator).lower()
        if operator == 'or':  set_func = union
        else: set_func = intersection

        res = None
        for filter_id in record.keys:
            rows = self.search(filter_id)
            res = set_func(res,rows)

        if res:
            return res, (self.id,)
        else:
            return IITreeSet(), (self.id,)

    def uniqueValues(self,name=None, withLength=0):
        """ needed to be consistent with the interface """
        return self.filteredSets.keys()

    def getEntryForObject(self,docid, default=_marker):
        """ Takes a document ID and returns all the information we have
            on that specific object.
        """
        return self.filteredSets.keys()

    def addFilteredSet(self, filter_id, typeFilteredSet, expr):
        # Add a FilteredSet object.
        if self.filteredSets.has_key(filter_id):
            raise KeyError,\
                'A FilteredSet with this name already exists: %s' % filter_id
        self.filteredSets[filter_id] = factory(filter_id,
                                               typeFilteredSet,
                                               expr,
                                              )

    def delFilteredSet(self, filter_id):
        # Delete the FilteredSet object specified by 'filter_id'.
        if not self.filteredSets.has_key(filter_id):
            raise KeyError,\
                'no such FilteredSet:  %s' % filter_id
        del self.filteredSets[filter_id]

    def clearFilteredSet(self, filter_id):
        # Clear the FilteredSet object specified by 'filter_id'.
        if not self.filteredSets.has_key(filter_id):
            raise KeyError,\
                'no such FilteredSet:  %s' % filter_id
        self.filteredSets[filter_id].clear()

    def manage_addFilteredSet(self, filter_id, typeFilteredSet, expr, URL1, \
            REQUEST=None,RESPONSE=None):
        """ add a new filtered set """

        if len(filter_id) == 0: raise RuntimeError,'Length of ID too short'
        if len(expr) == 0: raise RuntimeError,'Length of expression too short'

        self.addFilteredSet(filter_id, typeFilteredSet, expr)

        if RESPONSE:
            RESPONSE.redirect(URL1+'/manage_workspace?'
            'manage_tabs_message=FilteredSet%20added')

    def manage_delFilteredSet(self, filter_ids=[], URL1=None, \
            REQUEST=None,RESPONSE=None):
        """ delete a list of FilteredSets"""

        for filter_id in filter_ids:
            self.delFilteredSet(filter_id)

        if RESPONSE:
            RESPONSE.redirect(URL1+'/manage_workspace?'
            'manage_tabs_message=FilteredSet(s)%20deleted')

    def manage_saveFilteredSet(self,filter_id, expr, URL1=None,\
            REQUEST=None,RESPONSE=None):
        """ save expression for a FilteredSet """

        self.filteredSets[filter_id].setExpression(expr)

        if RESPONSE:
            RESPONSE.redirect(URL1+'/manage_workspace?'
            'manage_tabs_message=FilteredSet(s)%20updated')

    def getIndexSourceNames(self):
        """ return names of indexed attributes """
        return ('n/a',)

    def getIndexQueryNames(self):
        return (self.id,)

    def manage_clearFilteredSet(self, filter_ids=[], URL1=None, \
            REQUEST=None,RESPONSE=None):
        """  clear a list of FilteredSets"""

        for filter_id in filter_ids:
            self.clearFilteredSet(filter_id)

        if RESPONSE:
            RESPONSE.redirect(URL1+'/manage_workspace?'
             'manage_tabs_message=FilteredSet(s)%20cleared')

    manage = manage_main = DTMLFile('dtml/manageTopicIndex',globals())
    manage_main._setName('manage_main')
    editFilteredSet = DTMLFile('dtml/editFilteredSet',globals())
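
_apply_index above folds the per-filter id sets together with union or intersection depending on the requested operator; a small sketch with hand-built IISets:

from BTrees.IIBTree import IISet, union, intersection

rows_by_filter = {
    'published': IISet((1, 2, 3)),
    'public':    IISet((2, 3, 4)),
}

def combine(filter_ids, operator='or'):
    set_func = union if operator == 'or' else intersection
    res = None                          # union/intersection treat None as "no set yet"
    for fid in filter_ids:
        res = set_func(res, rows_by_filter.get(fid))
    return res

print(list(combine(['published', 'public'], 'or')))   # [1, 2, 3, 4]
print(list(combine(['published', 'public'], 'and')))  # [2, 3]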
Exemple #52
0
class ZODBMutablePropertyProvider(BasePlugin):
    """Storage for mutable properties in the ZODB for users/groups.

    API sounds like it's only for users, but groups work as well.
    """

    meta_type = 'ZODB Mutable Property Provider'

    security = ClassSecurityInfo()

    def __init__(self, id, title='', schema=None, **kw):
        """Create in-ZODB mutable property provider.

        Provide a schema either as a list of (name,type,value) tuples
        in the 'schema' parameter or as a series of keyword parameters
        'name=value'. Types will be guessed in this case.

        The 'value' is meant as the default value, and will be used
        unless the user provides data.

        If no schema is provided by constructor, the properties of the
        portal_memberdata object will be used.

        Types available: string, text, boolean, int, long, float, lines, date
        """
        self.id = id
        self.title = title
        self._storage = OOBTree()

        # calculate schema and default values
        defaultvalues = {}
        if not schema and not kw:
            schema = ()
        elif not schema and kw:
            schema = _guessSchema(kw)
            defaultvalues = kw
        else:
            valuetuples = [(name, value) for name, type, value in schema]
            schema = [(name, type) for name, type, value in schema]
            for name, value in valuetuples:
                defaultvalues[name] = value
        self._schema = tuple(schema)
        self._defaultvalues = defaultvalues

    def _getSchema(self, isgroup=None):
        # this could probably stand to be cached
        datatool = isgroup and "portal_groupdata" or "portal_memberdata"

        schema = self._schema
        if not schema:
            # if no schema is provided, use portal_memberdata properties
            schema = ()
            mdtool = getToolByName(self, datatool, None)
            # Don't fail badly if tool is not available.
            if mdtool is not None:
                mdschema = mdtool.propertyMap()
                schema = [(elt['id'], elt['type']) for elt in mdschema]
        return schema

    def _getDefaultValues(self, isgroup=None):
        """Returns a dictionary mapping of property names to default values.
        Defaults to portal_*data tool if necessary.
        """
        datatool = isgroup and "portal_groupdata" or "portal_memberdata"

        defaultvalues = self._defaultvalues
        if not self._schema:
            # if no schema is provided, use portal_*data properties
            defaultvalues = {}
            mdtool = getToolByName(self, datatool, None)
            # Don't fail badly if tool is not available.
            if mdtool is not None:
                # we rely on propertyMap and propertyItems mapping
                mdvalues = mdtool.propertyItems()
                for name, value in mdvalues:
                    # For selection types the default value is the name of a
                    # method which returns the possible values. There is no way
                    # to set a default value for those types.
                    ptype = mdtool.getPropertyType(name)
                    if ptype == "selection":
                        defaultvalues[name] = ""
                    elif ptype == "multiple selection":
                        defaultvalues[name] = []
                    else:
                        defaultvalues[name] = value

            # ALERT! if someone gives their *_data tool a title and wants a
            #        title as a property of the user/group (groups have one by
            #        default), we don't want every principal to share that
            #        title, since a title is used in the UI if it exists
            if defaultvalues.get("title"):
                defaultvalues["title"] = ""
        return defaultvalues

    @security.private
    def getPropertiesForUser(self, user, request=None):
        """Get property values for a user or group.
        Returns a dictionary of values or a PropertySheet.

        This implementation will always return a MutablePropertySheet.

        NOTE: Must always return something, or else the property sheet
        won't get created and this will screw up portal_memberdata.
        """
        isGroup = getattr(user, 'isGroup', lambda: None)()

        data = self._storage.get(user.getId())
        defaults = self._getDefaultValues(isGroup)

        # provide default values where missing
        if not data:
            data = {}
        for key, val in defaults.items():
            if key not in data:
                data[key] = val

        return MutablePropertySheet(self.id,
                                    schema=self._getSchema(isGroup),
                                    **data)

    @security.private
    def setPropertiesForUser(self, user, propertysheet):
        """Set the properties of a user or group based on the contents of a
        property sheet.
        """
        isGroup = getattr(user, 'isGroup', lambda: None)()

        properties = dict(propertysheet.propertyItems())

        for name, property_type in self._getSchema(isGroup) or ():
            if (name in properties
                    and not validateValue(property_type, properties[name])):
                raise ValueError('Invalid value: %s does not conform to %s' %
                                 (name, property_type))

        allowed_prop_keys = [pn for pn, pt in self._getSchema(isGroup) or ()]
        if allowed_prop_keys:
            prop_names = set(properties.keys()) - set(allowed_prop_keys)
            if prop_names:
                raise ValueError('Unknown Properties: %r' % prop_names)

        userid = user.getId()
        userprops = self._storage.get(userid)
        properties.update({'isGroup': isGroup})
        if userprops is not None:
            userprops.update(properties)
            # notify persistence machinery of change
            self._storage[userid] = self._storage[userid]
        else:
            self._storage.insert(user.getId(), properties)

    @security.private
    def deleteUser(self, user_id):
        """Delete all user properties
        """
        # Do nothing if an unknown user_id is given
        try:
            del self._storage[user_id]
        except KeyError:
            pass

    @security.private
    def testMemberData(self, memberdata, criteria, exact_match=False):
        """Test if a memberdata matches the search criteria.
        """
        for (key, value) in criteria.items():
            testvalue = memberdata.get(key, None)
            if testvalue is None:
                return False

            if isStringType(testvalue):
                testvalue = safe_unicode(testvalue.lower())
            if isStringType(value):
                value = safe_unicode(value.lower())

            if exact_match:
                if value != testvalue:
                    return False
            else:
                try:
                    if value not in testvalue:
                        return False
                except TypeError:
                    # Fall back to exact match if we can check for
                    # sub-component
                    if value != testvalue:
                        return False

        return True

    @security.private
    def enumerateUsers(self, id=None, login=None, exact_match=False, **kw):
        """ See IUserEnumerationPlugin.
        """
        plugin_id = self.getId()

        # This plugin can't search for a user by id or login, because there
        # are no such keys in the storage (only the per-user data dicts), so
        # return an empty result for those queries; any other criteria in kw
        # are handled below.
        if id is not None or login is not None:
            return ()

        criteria = copy.copy(kw)

        users = [(user, data) for (user, data) in self._storage.items()
                 if self.testMemberData(data, criteria, exact_match)
                 and not data.get('isGroup', False)]

        user_info = [{
            'id': self.prefix + user_id,
            'login': user_id,
            'title': data.get('fullname', user_id),
            'description': data.get('fullname', user_id),
            'email': data.get('email', ''),
            'pluginid': plugin_id
        } for (user_id, data) in users]

        return tuple(user_info)

    def updateUser(self, user_id, login_name):
        """ Update the login name of the user with id user_id.

        This is a new part of the IUserEnumerationPlugin interface, but
        not interesting for us.
        """
        pass

    def updateEveryLoginName(self, quit_on_first_error=True):
        """Update login names of all users to their canonical value.

        This is a new part of the IUserEnumerationPlugin interface, but
        not interesting for us.
        """
        pass
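
The storage pattern above keeps one plain dict of properties per principal inside an OOBTree. Because a plain dict is not persistence-aware, the plugin re-assigns the value after updating it so the ZODB registers the change; a minimal sketch (outside the ZODB the re-assignment is simply a no-op):

from BTrees.OOBTree import OOBTree

storage = OOBTree()

def set_properties(userid, new_props):
    props = storage.get(userid)
    if props is None:
        storage.insert(userid, dict(new_props))
    else:
        props.update(new_props)
        # a plain dict does not notify the ZODB when mutated, so re-assign
        # the key to mark the containing BTree bucket as changed
        storage[userid] = storage[userid]

set_properties('alice', {'fullname': 'Alice', 'email': 'alice@example.org'})
set_properties('alice', {'email': 'alice@example.com'})
print(storage['alice'])   # {'fullname': 'Alice', 'email': 'alice@example.com'}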
Exemple #53
0
class ZODBGroupManager( BasePlugin ):

    """ PAS plugin for managing groups, and groups of groups in the ZODB
    """
    __implements__ = ( IGroupEnumerationPlugin
                     , IGroupsPlugin
                     )

    meta_type = 'ZODB Group Manager'

    security = ClassSecurityInfo()

    def __init__(self, id, title=None):

        self._id = self.id = id
        self.title = title
        self._groups = OOBTree()
        self._principal_groups = OOBTree()

    #
    #   IGroupEnumerationPlugin implementation
    #
    security.declarePrivate( 'enumerateGroups' )
    def enumerateGroups( self
                        , id=None
                        , title=None
                        , exact_match=False
                        , sort_by=None
                        , max_results=None
                        , **kw
                        ):

        """ See IGroupEnumerationPlugin.
        """
        group_info = []
        group_ids = []
        plugin_id = self.getId()

        if isinstance( id, str ):
            id = [ id ]

        if isinstance( title, str ):
            title = [ title ]

        if exact_match and ( id or title ):

            if id:
                group_ids.extend( id )
            elif title:
                group_ids.extend( title )

        if group_ids:
            group_filter = None

        else:   # Searching
            group_ids = self.listGroupIds()
            group_filter = _ZODBGroupFilter( id, title, **kw )

        for group_id in group_ids:

            if self._groups.get( group_id, None ):
                e_url = '%s/manage_groups' % self.getId()
                p_qs = 'group_id=%s' % group_id
                m_qs = 'group_id=%s&assign=1' % group_id

                info = {}
                info.update( self._groups[ group_id ] )
                
                info[ 'pluginid' ] = plugin_id
                info[ 'properties_url' ] = '%s?%s' % ( e_url, p_qs )
                info[ 'members_url' ] = '%s?%s' % ( e_url, m_qs )
                
                if not group_filter or group_filter( info ):
                    group_info.append( info )

        return tuple( group_info )

    #
    #   IGroupsPlugin implementation
    #
    security.declarePrivate( 'getGroupsForPrincipal' )
    def getGroupsForPrincipal( self, principal, request=None ):

        """ See IGroupsPlugin.
        """
        return tuple( self._principal_groups.get( principal.getId(), () ) )

    #
    #   (notional)IZODBGroupManager interface
    #
    security.declareProtected( ManageGroups, 'listGroupIds' )
    def listGroupIds( self ):

        """ -> ( group_id_1, ... group_id_n )
        """
        return self._groups.keys()

    security.declareProtected( ManageGroups, 'listGroupInfo' )
    def listGroupInfo( self ):

        """ -> ( {}, ...{} )

        o Return one mapping per group, with the following keys:

          - 'id' 
        """
        return self._groups.values()

    security.declareProtected( ManageGroups, 'getGroupInfo' )
    def getGroupInfo( self, group_id ):

        """ group_id -> {}
        """
        return self._groups[ group_id ]

    security.declarePrivate( 'addGroup' )
    def addGroup( self, group_id, title=None, description=None ):

        """ Add 'group_id' to the list of groups managed by this object.

        o Raise KeyError on duplicate.
        """
        if self._groups.get( group_id ) is not None:
            raise KeyError, 'Duplicate group ID: %s' % group_id

        self._groups[ group_id ] = { 'id' : group_id
                                   , 'title' : title
                                   , 'description' : description
                                   }

    security.declarePrivate( 'updateGroup' )
    def updateGroup( self, group_id, title, description ):

        """ Update properties for 'group_id'

        o Raise KeyError if group_id doesn't already exist.
        """
        self._groups[ group_id ].update({ 'title' : title
                                        , 'description' : description
                                        })
        self._groups[ group_id ] = self._groups[ group_id ]

    security.declarePrivate( 'removeGroup' )
    def removeGroup( self, group_id ):

        """ Remove 'role_id' from the list of roles managed by this
            object, removing assigned members from it before doing so.

        o Raise KeyError if 'group_id' doesn't already exist.
        """
        for principal_id in self._principal_groups.keys():
            self.removePrincipalFromGroup( principal_id, group_id )
        del self._groups[ group_id ]

    #
    #   Group assignment API
    #
    security.declareProtected( ManageGroups, 'listAvailablePrincipals' )
    def listAvailablePrincipals( self, group_id, search_name ):

        """ Return a list of principal IDs to that can belong to the group.

        o If supplied, 'search_name' constrains the principal IDs;  if not,
          return empty list.

        o Omit principals with existing assignments.
        """
        result = []

        if search_name:  # don't bother searching if no criteria

            parent = aq_parent( self )

            for info in parent.searchPrincipals( max_results=20
                                               , sort_by='id'
                                               , name=search_name
                                               , exact_match=False
                                               ):
                id = info[ 'id' ]
                title = info.get( 'title', id )
                if ( group_id not in self._principal_groups.get( id, () )
                 and group_id != id ):
                    result.append( ( id, title ) )
        
        return result

    security.declareProtected( ManageGroups, 'listAssignedPrincipals' )
    def listAssignedPrincipals( self, group_id ):

        """ Return a list of principal IDs belonging to a group.
        """
        result = []

        for k, v in self._principal_groups.items():
            if group_id in v:
                # should be one and only one mapping to 'k'

                parent = aq_parent( self )
                info = parent.searchPrincipals( id=k, exact_match=True )
                assert( len( info ) == 1 )
                result.append( ( k, info[0].get( 'title', k ) ) )
        
        return result

    security.declareProtected( ManageGroups, 'addPrincipalToGroup' )
    def addPrincipalToGroup( self, principal_id, group_id ):

        """ Add a principal to a group.

        o Return a boolean indicating whether a new assignment was created.

        o Raise KeyError if 'group_id' is unknown.
        """
        group_info = self._groups[ group_id ] # raise KeyError if unknown!

        current = self._principal_groups.get( principal_id, () )
        already = group_id in current

        if not already:
            new = current + ( group_id, )
            self._principal_groups[ principal_id ] = new

        return not already

    security.declareProtected( ManageGroups, 'removePrincipalFromGroup' )
    def removePrincipalFromGroup( self, principal_id, group_id ):

        """ Remove a prinicpal from from a group.

        o Return a boolean indicating whether the principal was already 
          a member of the group.

        o Raise KeyError if 'group_id' is unknown.

        o Ignore requests to remove a principal if not already a member
          of the group.
        """
        group_info = self._groups[ group_id ] # raise KeyError if unknown!

        current = self._principal_groups.get( principal_id, () )
        new = tuple( [ x for x in current if x != group_id ] )
        already = current != new

        if already:
            self._principal_groups[ principal_id ] = new

        return already

    #
    #   ZMI
    #
    manage_options = ( ( { 'label': 'Groups', 
                           'action': 'manage_groups', }
                         ,
                       )
                     + BasePlugin.manage_options
                     )

    security.declarePublic( 'manage_widgets' )
    manage_widgets = PageTemplateFile( 'www/zuWidgets'
                                     , globals()
                                     , __name__='manage_widgets'
                                     )

    security.declareProtected( ManageGroups, 'manage_groups' )
    manage_groups = PageTemplateFile( 'www/zgGroups'
                                    , globals()
                                    , __name__='manage_groups'
                                    )

    security.declareProtected( ManageGroups, 'manage_twoLists' )
    manage_twoLists = PageTemplateFile( '../www/two_lists'
                                      , globals()
                                      , __name__='manage_twoLists'
                                      )

    security.declareProtected( ManageGroups, 'manage_addGroup' )
    def manage_addGroup( self
                       , group_id
                       , title=None
                       , description=None
                       , RESPONSE=None
                       ):
        """ Add a group via the ZMI.
        """
        self.addGroup( group_id, title, description )

        message = 'Group+added'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageGroups, 'manage_updateGroup' )
    def manage_updateGroup( self
                          , group_id
                          , title
                          , description
                          , RESPONSE=None
                          ):
        """ Update a group via the ZMI.
        """
        self.updateGroup( group_id, title, description )

        message = 'Group+updated'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageGroups, 'manage_removeGroups' )
    def manage_removeGroups( self
                           , group_ids
                           , RESPONSE=None
                           ):
        """ Remove one or more groups via the ZMI.
        """
        group_ids = filter( None, group_ids )

        if not group_ids:
            message = 'no+groups+selected'

        else:
        
            for group_id in group_ids:
                self.removeGroup( group_id )

            message = 'Groups+removed'

        if RESPONSE is not None:
            RESPONSE.redirect( '%s/manage_groups?manage_tabs_message=%s'
                             % ( self.absolute_url(), message )
                             )

    security.declareProtected( ManageGroups, 'manage_addPrincipalsToGroup' )
    def manage_addPrincipalsToGroup( self
                                   , group_id
                                   , principal_ids
                                   , RESPONSE=None
                                   ):
        """ Add one or more principals to a group via the ZMI.
        """
        assigned = []

        for principal_id in principal_ids:
            if self.addPrincipalToGroup( principal_id, group_id ):
                assigned.append( principal_id )

        if not assigned:
            message = 'Principals+already+members+of+%s' % group_id
        else:
            message = '%s+added+to+%s' % ( '+'.join( assigned )
                                         , group_id
                                         )

        if RESPONSE is not None:
            RESPONSE.redirect( ( '%s/manage_groups?group_id=%s&assign=1'
                               + '&manage_tabs_message=%s'
                               ) % ( self.absolute_url(), group_id, message )
                             )

    security.declareProtected( ManageGroups
                             , 'manage_removePrincipalsFromGroup' 
                             )
    def manage_removePrincipalsFromGroup( self
                                        , group_id
                                        , principal_ids
                                        , RESPONSE=None
                                        ):
        """ Remove one or more principals from a group via the ZMI.
        """
        removed = []
        
        for principal_id in principal_ids:
            if self.removePrincipalFromGroup( principal_id, group_id ):
                removed.append( principal_id )

        if not removed:
            message = 'Principals+not+in+group+%s' % group_id
        else:
            message = 'Principals+%s+removed+from+%s' % ( '+'.join( removed )
                                                        , group_id
                                                        )

        if RESPONSE is not None:
            RESPONSE.redirect( ( '%s/manage_groups?group_id=%s&assign=1'
                               + '&manage_tabs_message=%s'
                               ) % ( self.absolute_url(), group_id, message )
                             )
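
Group membership above is a mapping from principal id to a tuple of group ids, with a fresh tuple stored on every change so the BTree sees the update; a stripped-down sketch:

from BTrees.OOBTree import OOBTree

principal_groups = OOBTree()

def add_to_group(principal_id, group_id):
    current = principal_groups.get(principal_id, ())
    if group_id in current:
        return False                      # already assigned
    principal_groups[principal_id] = current + (group_id,)
    return True

def remove_from_group(principal_id, group_id):
    current = principal_groups.get(principal_id, ())
    new = tuple(g for g in current if g != group_id)
    if new == current:
        return False                      # was not a member
    principal_groups[principal_id] = new
    return True

add_to_group('alice', 'editors')
add_to_group('alice', 'reviewers')
remove_from_group('alice', 'editors')
print(principal_groups['alice'])          # ('reviewers',)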
Exemple #54
0
class Package(SubscriptableBaseModel):
    pypi_url = 'http://pypi.python.org/pypi/{}/json'
    subobjects_attr = 'releases'

    def __init__(self, name):
        self.__name__ = name
        self.name = name
        self.releases = OOBTree()

    def __iter__(self):
        return (self.releases[release] for release in self.releases)

    def __getitem__(self, release_name_or_index):
        if isinstance(release_name_or_index, int):
            return next(
                itertools.islice(self.__iter__(), release_name_or_index,
                                 release_name_or_index + 1))
        return self.releases[release_name_or_index]

    def __setitem__(self, key, value):
        key = format_key(key)
        self.releases[key] = value
        self.releases[key].__parent__ = self

    def __delitem__(self, key):
        key = format_key(key)
        del (self.releases[key])

    @classmethod
    @cache_region('pypi', 'get_last_remote_filename')
    def get_last_remote_version(cls, proxy, package_name):
        logger.debug('Not in cache')
        if not proxy:
            return None
        try:
            result = requests.get(
                'http://pypi.python.org/pypi/{}/json'.format(package_name))
            if not result.status_code == 200:
                return None
            result = json.loads(result.content.decode('utf-8'))
            return result['info']['version']
        except ConnectionError:
            pass
        return None

    def repository_is_up_to_date(self, last_remote_release):
        if not last_remote_release:
            return True
        remote_version = parse_version(last_remote_release)

        local_versions = [
            release.version for release in self.releases.values()
        ]
        for version in local_versions:
            if parse_version(version) >= remote_version:
                return True
        return False

    @classmethod
    def by_name(cls, name, request):
        root = repository_root_factory(request)
        return root[name] if name in root else None

    def get_last_release(self):
        if not len(self.releases.items()):
            return None
        max_version = max(
            [parse_version(version) for version in self.releases.keys()])
        for version, release in self.releases.items():
            if parse_version(version) == max_version:
                return release

    @property
    def metadata(self):
        last_release = self.get_last_release()
        if last_release:
            return last_release.metadata
        else:
            return {}
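
get_last_release() scans the releases BTree for the highest parsed version; a standalone sketch that uses packaging.version.parse in place of the example's parse_version helper (an assumption, since that import is not shown):

from BTrees.OOBTree import OOBTree
from packaging.version import parse       # assumed stand-in for parse_version

releases = OOBTree()
releases['1.0'] = 'release-1.0'
releases['1.2'] = 'release-1.2'
releases['1.10'] = 'release-1.10'

def last_release():
    if not len(releases):
        return None
    max_version = max(parse(v) for v in releases.keys())
    for version, release in releases.items():
        if parse(version) == max_version:
            return release

print(last_release())   # release-1.10  (1.10 sorts above 1.2 as a version)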
class UnIndex(SimpleItem):
    """Simple forward and reverse index.
    """

    zmi_icon = 'fas fa-info-circle'
    _counter = None
    operators = ('or', 'and')
    useOperator = 'or'
    query_options = ()

    def __init__(self, id, ignore_ex=None, call_methods=None,
                 extra=None, caller=None):
        """Create an unindex

        UnIndexes are indexes that contain two index components, the
        forward index (like plain index objects) and an inverted
        index.  The inverted index is so that objects can be unindexed
        even when the old value of the object is not known.

        e.g.

        self._index = {datum:[documentId1, documentId2]}
        self._unindex = {documentId:datum}

        The arguments are:

          'id' -- the name of the item attribute to index.  This is
          either an attribute name or a record key.

          'ignore_ex' -- should be set to true if you want the index
          to ignore exceptions raised while indexing instead of
          propagating them.

          'call_methods' -- should be set to true if you want the index
          to call the attribute 'id' (note: 'id' should be callable!)
          You will also need to pass in an object to the index and
          unindex methods for this to work.

          'extra' -- a mapping object that keeps additional
          index-related parameters - subitem 'indexed_attrs'
          can be string with comma separated attribute names or
          a list

          'caller' -- reference to the calling object (usually
          a (Z)Catalog instance)
        """

        def _get(o, k, default):
            """ return a value for a given key of a dict/record 'o' """
            if isinstance(o, dict):
                return o.get(k, default)
            else:
                return getattr(o, k, default)

        self.id = id
        self.ignore_ex = ignore_ex  # currently unimplemented
        self.call_methods = call_methods

        # allow index to index multiple attributes
        ia = _get(extra, 'indexed_attrs', id)
        if isinstance(ia, str):
            self.indexed_attrs = ia.split(',')
        else:
            self.indexed_attrs = list(ia)
        self.indexed_attrs = [
            attr.strip() for attr in self.indexed_attrs if attr]
        if not self.indexed_attrs:
            self.indexed_attrs = [id]

        self.clear()

    def __len__(self):
        return self._length()

    def getId(self):
        return self.id

    def clear(self):
        self._length = Length()
        self._index = OOBTree()
        self._unindex = IOBTree()

        if self._counter is None:
            self._counter = Length()
        else:
            self._increment_counter()

    def __nonzero__(self):
        return not not self._unindex

    def histogram(self):
        """Return a mapping which provides a histogram of the number of
        elements found at each point in the index.
        """
        histogram = {}
        for item in self._index.items():
            if isinstance(item, int):
                entry = 1  # "set" length is 1
            else:
                key, value = item
                entry = len(value)
            histogram[entry] = histogram.get(entry, 0) + 1
        return histogram

    def referencedObjects(self):
        """Generate a list of IDs for which we have referenced objects."""
        return self._unindex.keys()

    def getEntryForObject(self, documentId, default=_marker):
        """Takes a document ID and returns all the information we have
        on that specific object.
        """
        if default is _marker:
            return self._unindex.get(documentId)
        return self._unindex.get(documentId, default)

    def removeForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and remove any reference to documentId
        in its entry in the index.
        """
        indexRow = self._index.get(entry, _marker)
        if indexRow is not _marker:
            try:
                indexRow.remove(documentId)
                if not indexRow:
                    del self._index[entry]
                    self._length.change(-1)
            except ConflictError:
                raise
            except AttributeError:
                # index row is an int
                try:
                    del self._index[entry]
                except KeyError:
                    # swallow KeyError because it was probably
                    # removed and then _length AttributeError raised
                    pass
                if isinstance(self.__len__, Length):
                    self._length = self.__len__
                    del self.__len__
                self._length.change(-1)
            except Exception:
                LOG.error('%(context)s: unindex_object could not remove '
                          'documentId %(doc_id)s from index %(index)r.  This '
                          'should not happen.', dict(
                              context=self.__class__.__name__,
                              doc_id=documentId,
                              index=self.id),
                          exc_info=sys.exc_info())
        else:
            LOG.error('%(context)s: unindex_object tried to '
                      'retrieve set %(entry)r from index %(index)r '
                      'but couldn\'t.  This should not happen.', dict(
                          context=self.__class__.__name__,
                          entry=entry,
                          index=self.id))

    def insertForwardIndexEntry(self, entry, documentId):
        """Take the entry provided and put it in the correct place
        in the forward index.

        This will also deal with creating the entire row if necessary.
        """
        indexRow = self._index.get(entry, _marker)

        # Make sure there's actually a row there already. If not, create
        # a set and stuff it in first.
        if indexRow is _marker:
            # We always use a set to avoid getting conflict errors on
            # multiple threads adding a new row at the same time
            self._index[entry] = IITreeSet((documentId, ))
            self._length.change(1)
        else:
            try:
                indexRow.insert(documentId)
            except AttributeError:
                # Inline migration: index row with one element was an int at
                # first (before Zope 2.13).
                indexRow = IITreeSet((indexRow, documentId))
                self._index[entry] = indexRow

    def index_object(self, documentId, obj, threshold=None):
        """ wrapper to handle indexing of multiple attributes """

        fields = self.getIndexSourceNames()
        res = 0
        for attr in fields:
            res += self._index_object(documentId, obj, threshold, attr)

        if res > 0:
            self._increment_counter()

        return res > 0

    def _index_object(self, documentId, obj, threshold=None, attr=''):
        """ index and object 'obj' with integer id 'documentId'"""
        returnStatus = 0

        # First we need to see if there's anything interesting to look at
        datum = self._get_object_datum(obj, attr)
        if datum is None:
            # Prevent None from being indexed. None doesn't have a valid
            # ordering definition compared to any other object.
            # BTrees 4.0+ will throw a TypeError
            # "object has default comparison" and won't let it be indexed.
            return 0

        datum = self._convert(datum, default=_marker)

        # We don't want to do anything that we don't have to here, so we'll
        # check to see if the new and existing information is the same.
        oldDatum = self._unindex.get(documentId, _marker)
        if datum != oldDatum:
            if oldDatum is not _marker:
                self.removeForwardIndexEntry(oldDatum, documentId)
                if datum is _marker:
                    try:
                        del self._unindex[documentId]
                    except ConflictError:
                        raise
                    except Exception:
                        LOG.error('%(context)s: oldDatum was there, '
                                  'now it\'s not for documentId %(doc_id)s '
                                  'from index %(index)r.  This '
                                  'should not happen.', dict(
                                      context=self.__class__.__name__,
                                      doc_id=documentId,
                                      index=self.id),
                                  exc_info=sys.exc_info())

            if datum is not _marker:
                self.insertForwardIndexEntry(datum, documentId)
                self._unindex[documentId] = datum

            returnStatus = 1

        return returnStatus

    def _get_object_datum(self, obj, attr):
        # self.id is the name of the index, which is also the name of the
        # attribute we're interested in.  If the attribute is callable,
        # we'll do so.
        try:
            datum = getattr(obj, attr)
            if safe_callable(datum):
                datum = datum()
        except (AttributeError, TypeError):
            datum = _marker
        return datum

    def _increment_counter(self):
        if self._counter is None:
            self._counter = Length()
        self._counter.change(1)

    def getCounter(self):
        """Return a counter which is increased on index changes"""
        return self._counter is not None and self._counter() or 0

    def numObjects(self):
        """Return the number of indexed objects."""
        return len(self._unindex)

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return len(self)

    def unindex_object(self, documentId):
        """ Unindex the object with integer id 'documentId' and don't
        raise an exception if we fail
        """
        unindexRecord = self._unindex.get(documentId, _marker)
        if unindexRecord is _marker:
            return None

        self._increment_counter()

        self.removeForwardIndexEntry(unindexRecord, documentId)
        try:
            del self._unindex[documentId]
        except ConflictError:
            raise
        except Exception:
            LOG.debug('%(context)s: attempt to unindex nonexistent '
                      'documentId %(doc_id)s from index %(index)r. This '
                      'should not happen.', dict(
                          context=self.__class__.__name__,
                          doc_id=documentId,
                          index=self.id),
                      exc_info=True)

    def _apply_not(self, not_parm, resultset=None):
        index = self._index
        setlist = []
        for k in not_parm:
            s = index.get(k, None)
            if s is None:
                continue
            elif isinstance(s, int):
                s = IISet((s, ))
            setlist.append(s)
        return multiunion(setlist)

    def _convert(self, value, default=None):
        return value

    def getRequestCache(self):
        """returns dict for caching per request for interim results
        of an index search. Returns 'None' if no REQUEST attribute
        is available"""

        cache = None
        REQUEST = aq_get(self, 'REQUEST', None)
        if REQUEST is not None:
            catalog = aq_parent(aq_parent(aq_inner(self)))
            if catalog is not None:
                # unique catalog identifier
                key = '_catalogcache_{0}_{1}'.format(
                    catalog.getId(), id(catalog))
                cache = REQUEST.get(key, None)
                if cache is None:
                    cache = REQUEST[key] = RequestCache()

        return cache

    def getRequestCacheKey(self, record, resultset=None):
        """returns an unique key of a search record"""
        params = []

        # record operator (or, and)
        params.append(('operator', record.operator))

        # not / exclude operator
        not_value = record.get('not', None)
        if not_value is not None:
            not_value = frozenset(not_value)
            params.append(('not', not_value))

        # record options
        for op in ['range', 'usage']:
            op_value = record.get(op, None)
            if op_value is not None:
                params.append((op, op_value))

        # record keys
        rec_keys = frozenset(record.keys)
        params.append(('keys', rec_keys))

        # build record identifier
        rid = frozenset(params)

        # unique index identifier
        iid = '_{0}_{1}_{2}'.format(self.__class__.__name__,
                                    self.id, self.getCounter())
        return (iid, rid)

    def _apply_index(self, request, resultset=None):
        """Apply the index to query parameters given in the request arg.

        If the query does not match the index, return None, otherwise
        return a tuple of (result, used_attributes), where used_attributes
        is again a tuple with the names of all used data fields.

        If not `None`, the resultset argument
        indicates that the search result is relevant only on this set,
        i.e. everything outside resultset is of no importance.
        The index can use this information for optimizations.
        """
        record = IndexQuery(request, self.id, self.query_options,
                            self.operators, self.useOperator)
        if record.keys is None:
            return None
        return (self.query_index(record, resultset=resultset), (self.id, ))

    def query_index(self, record, resultset=None):
        """Search the index with the given IndexQuery object.

        If not `None`, the resultset argument
        indicates that the search result is relevant only on this set,
        i.e. everything outside resultset is of no importance.
        The index can use this information for optimizations.
        """
        index = self._index
        r = None
        opr = None

        # not / exclude parameter
        not_parm = record.get('not', None)

        operator = record.operator

        cachekey = None
        cache = self.getRequestCache()
        if cache is not None:
            cachekey = self.getRequestCacheKey(record)
            if cachekey is not None:
                cached = None
                if operator == 'or':
                    cached = cache.get(cachekey, None)
                else:
                    cached_setlist = cache.get(cachekey, None)
                    if cached_setlist is not None:
                        r = resultset
                        for s in cached_setlist:
                            # the result is bound by the resultset
                            r = intersection(r, s)
                            # If intersection, we can't possibly get a
                            # smaller result
                            if not r:
                                break
                        cached = r

                if cached is not None:
                    if isinstance(cached, int):
                        cached = IISet((cached, ))

                    if not_parm:
                        not_parm = list(map(self._convert, not_parm))
                        exclude = self._apply_not(not_parm, resultset)
                        cached = difference(cached, exclude)

                    return cached

        if not record.keys and not_parm:
            # convert into indexed format
            not_parm = list(map(self._convert, not_parm))
            # we have only a 'not' query
            record.keys = [k for k in index.keys() if k not in not_parm]
        else:
            # convert query arguments into indexed format
            record.keys = list(map(self._convert, record.keys))

        # Range parameter
        range_parm = record.get('range', None)
        if range_parm:
            opr = 'range'
            opr_args = []
            if range_parm.find('min') > -1:
                opr_args.append('min')
            if range_parm.find('max') > -1:
                opr_args.append('max')

        if record.get('usage', None):
            # see if any usage params are sent to field
            opr = record.usage.lower().split(':')
            opr, opr_args = opr[0], opr[1:]

        if opr == 'range':  # range search
            if 'min' in opr_args:
                lo = min(record.keys)
            else:
                lo = None
            if 'max' in opr_args:
                hi = max(record.keys)
            else:
                hi = None
            if hi:
                setlist = index.values(lo, hi)
            else:
                setlist = index.values(lo)

            # If we only use one key, intersect and return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))

                if cachekey is not None:
                    if operator == 'or':
                        cache[cachekey] = result
                    else:
                        cache[cachekey] = [result]

                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result

            if operator == 'or':
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                r = multiunion(tmp)

                if cachekey is not None:
                    cache[cachekey] = r
            else:
                # For intersection, sort with smallest data set first
                tmp = []
                for s in setlist:
                    if isinstance(s, int):
                        s = IISet((s,))
                    tmp.append(s)
                if len(tmp) > 2:
                    setlist = sorted(tmp, key=len)
                else:
                    setlist = tmp

                # 'r' is not invariant of resultset. Thus, we
                # have to remember 'setlist'
                if cachekey is not None:
                    cache[cachekey] = setlist

                r = resultset
                for s in setlist:
                    # the result is bound by the resultset
                    r = intersection(r, s)
                    # If intersection, we can't possibly get a smaller result
                    if not r:
                        break

        else:  # not a range search
            # Filter duplicates
            setlist = []
            for k in record.keys:
                if k is None:
                    # Prevent None from being looked up. None doesn't
                    # have a valid ordering definition compared to any
                    # other object. BTrees 4.0+ will throw a TypeError
                    # "object has default comparison".
                    continue
                try:
                    s = index.get(k, None)
                except TypeError:
                    # key is not valid for this BTree so the value is None
                    LOG.error(
                        '%(context)s: query_index tried '
                        'to look up key %(key)r from index %(index)r '
                        'but key was of the wrong type.', dict(
                            context=self.__class__.__name__,
                            key=k,
                            index=self.id,
                        )
                    )
                    s = None
                # If None, try to bail early
                if s is None:
                    if operator == 'or':
                        # If union, we can possibly get a bigger result
                        continue
                    # If intersection, we can't possibly get a smaller result
                    if cachekey is not None:
                        # If operator is 'and', we have to cache a list of
                        # IISet objects
                        cache[cachekey] = [IISet()]
                    return IISet()
                elif isinstance(s, int):
                    s = IISet((s,))
                setlist.append(s)

            # If we only use one key return immediately
            if len(setlist) == 1:
                result = setlist[0]
                if isinstance(result, int):
                    result = IISet((result,))

                if cachekey is not None:
                    if operator == 'or':
                        cache[cachekey] = result
                    else:
                        cache[cachekey] = [result]

                if not_parm:
                    exclude = self._apply_not(not_parm, resultset)
                    result = difference(result, exclude)
                return result

            if operator == 'or':
                # If we already get a small result set passed in, intersecting
                # the various indexes with it and doing the union later is
                # faster than creating a multiunion first.

                if resultset is not None and len(resultset) < 200:
                    smalllist = []
                    for s in setlist:
                        smalllist.append(intersection(resultset, s))
                    r = multiunion(smalllist)

                    # 'r' is not invariant of resultset.  Thus, we
                    # have to remember the union of 'setlist'. But
                    # this is maybe a performance killer. So we do not cache.
                    # if cachekey is not None:
                    #    cache[cachekey] = multiunion(setlist)

                else:
                    r = multiunion(setlist)
                    if cachekey is not None:
                        cache[cachekey] = r
            else:
                # For intersection, sort with smallest data set first
                if len(setlist) > 2:
                    setlist = sorted(setlist, key=len)

                # 'r' is not invariant of resultset. Thus, we
                # have to remember 'setlist'
                if cachekey is not None:
                    cache[cachekey] = setlist

                r = resultset
                for s in setlist:
                    r = intersection(r, s)
                    # If intersection, we can't possibly get a smaller result
                    if not r:
                        break

        if isinstance(r, int):
            r = IISet((r, ))
        if r is None:
            return IISet()
        if not_parm:
            exclude = self._apply_not(not_parm, resultset)
            r = difference(r, exclude)
        return r

    def hasUniqueValuesFor(self, name):
        """has unique values for column name"""
        if name == self.id:
            return 1
        return 0

    def getIndexSourceNames(self):
        """Return sequence of indexed attributes."""
        return getattr(self, 'indexed_attrs', [self.id])

    def getIndexQueryNames(self):
        """Indicate that this index applies to queries for the index's name."""
        return (self.id,)

    def uniqueValues(self, name=None, withLengths=0):
        """returns the unique values for name

        if withLengths is true, returns a sequence of
        tuples of (value, length)
        """
        if name is None:
            name = self.id
        elif name != self.id:
            return

        if not withLengths:
            for key in self._index.keys():
                yield key
        else:
            for key, value in self._index.items():
                if isinstance(value, int):
                    yield (key, 1)
                else:
                    yield (key, len(value))

    def keyForDocument(self, id):
        # This method is superseded by documentToKeyMap
        return self._unindex[id]

    def documentToKeyMap(self):
        return self._unindex

    def items(self):
        items = []
        for k, v in self._index.items():
            if isinstance(v, int):
                v = IISet((v,))
            items.append((k, v))
        return items
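
A minimal, self-contained sketch of the set algebra that query_index above
relies on, using only the BTrees package. The index mapping, the query()
helper and the key values are illustrative, not part of the real class:

from BTrees.IIBTree import IISet, intersection, difference, multiunion

# toy index: key -> IISet of document ids
index = {
    'red': IISet((1, 2, 3)),
    'blue': IISet((2, 3, 4)),
    'green': IISet((3, 5)),
}

def query(keys, operator='or', not_keys=()):
    setlist = [index.get(k, IISet()) for k in keys]
    if operator == 'or':
        # union of all matching sets
        r = multiunion(setlist)
    else:
        # intersect smallest set first so the working set stays small
        r = None
        for s in sorted(setlist, key=len):
            r = intersection(r, s)  # intersection(None, s) returns s
            if not r:
                break
    # subtract the 'not' terms at the end
    exclude = multiunion([index.get(k, IISet()) for k in not_keys])
    return difference(r, exclude)

print(list(query(['red', 'blue'], operator='and')))      # [2, 3]
print(list(query(['red', 'green'], not_keys=['blue'])))  # [1, 5]
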
class TopicIndex(Persistent, SimpleItem):
    """A TopicIndex maintains a set of FilteredSet objects.

    Every FilteredSet object consists of an expression and an IISet with the
    ids of all indexed objects that evaluate to 1 for this expression.
    """
    implements(ITopicIndex, IPluggableIndex)

    meta_type = "TopicIndex"
    query_options = ('query', 'operator')

    manage_options = ({'label': 'FilteredSets', 'action': 'manage_main'}, )

    def __init__(self, id, caller=None):
        self.id = id
        self.filteredSets = OOBTree()
        self.operators = ('or', 'and')
        self.defaultOperator = 'or'

    def getId(self):
        return self.id

    def clear(self):
        for fs in self.filteredSets.values():
            fs.clear()

    def index_object(self, docid, obj, threshold=100):
        """ hook for (Z)Catalog """
        for fid, filteredSet in self.filteredSets.items():
            filteredSet.index_object(docid, obj)
        return 1

    def unindex_object(self, docid):
        """ hook for (Z)Catalog """
        for fs in self.filteredSets.values():
            try:
                fs.unindex_object(docid)
            except KeyError:
                LOG.debug('Attempt to unindex document'
                          ' with id %s failed' % docid)
        return 1

    def numObjects(self):
        """Return the number of indexed objects."""
        return "n/a"

    def indexSize(self):
        """Return the size of the index in terms of distinct values."""
        return "n/a"

    def search(self, filter_id):
        f = self.filteredSets.get(filter_id, None)
        if f is not None:
            return f.getIds()

    def _apply_index(self, request):
        """hook for (Z)Catalog
        'request' --  mapping type (usually {"topic": "..." }
        """
        record = parseIndexRequest(request, self.id, self.query_options)
        if record.keys is None:
            return None

        operator = record.get('operator', self.defaultOperator).lower()
        if operator == 'or':
            set_func = union
        else:
            set_func = intersection

        res = None
        for filter_id in record.keys:
            rows = self.search(filter_id)
            res = set_func(res, rows)

        if res:
            return res, (self.id, )
        else:
            return IITreeSet(), (self.id, )
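
    # Illustrative request shape, per the docstring above:
    #   {'topic': {'query': ['filter_a', 'filter_b'], 'operator': 'and'}}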

    def uniqueValues(self, name=None, withLength=0):
        """ needed to be consistent with the interface """
        return self.filteredSets.keys()

    def getEntryForObject(self, docid, default=_marker):
        """ Takes a document ID and returns all the information we have
            on that specific object.
        """
        return self.filteredSets.keys()

    def addFilteredSet(self, filter_id, typeFilteredSet, expr):
        # Add a FilteredSet object.
        if filter_id in self.filteredSets:
            raise KeyError('A FilteredSet with this name already exists: %s' %
                           filter_id)
        self.filteredSets[filter_id] = factory(
            filter_id,
            typeFilteredSet,
            expr,
        )

    def delFilteredSet(self, filter_id):
        # Delete the FilteredSet object specified by 'filter_id'.
        if filter_id not in self.filteredSets:
            raise KeyError('no such FilteredSet:  %s' % filter_id)
        del self.filteredSets[filter_id]

    def clearFilteredSet(self, filter_id):
        # Clear the FilteredSet object specified by 'filter_id'.
        f = self.filteredSets.get(filter_id, None)
        if f is None:
            raise KeyError('no such FilteredSet: %s' % filter_id)
        f.clear()

    def manage_addFilteredSet(self, filter_id, typeFilteredSet, expr, URL1, \
            REQUEST=None, RESPONSE=None):
        """ add a new filtered set """

        if len(filter_id) == 0:
            raise RuntimeError('Length of ID too short')
        if len(expr) == 0:
            raise RuntimeError('Length of expression too short')

        self.addFilteredSet(filter_id, typeFilteredSet, expr)

        if RESPONSE:
            RESPONSE.redirect(URL1 + '/manage_workspace?'
                              'manage_tabs_message=FilteredSet%20added')

    def manage_delFilteredSet(self,
                              filter_ids=[],
                              URL1=None,
                              REQUEST=None,
                              RESPONSE=None):
        """ delete a list of FilteredSets"""

        for filter_id in filter_ids:
            self.delFilteredSet(filter_id)

        if RESPONSE:
            RESPONSE.redirect(URL1 + '/manage_workspace?'
                              'manage_tabs_message=FilteredSet(s)%20deleted')

    def manage_saveFilteredSet(self,
                               filter_id,
                               expr,
                               URL1=None,
                               REQUEST=None,
                               RESPONSE=None):
        """ save expression for a FilteredSet """

        self.filteredSets[filter_id].setExpression(expr)

        if RESPONSE:
            RESPONSE.redirect(URL1 + '/manage_workspace?'
                              'manage_tabs_message=FilteredSet(s)%20updated')

    def getIndexSourceNames(self):
        """ return names of indexed attributes """
        return ('n/a', )

    def getIndexQueryNames(self):
        return (self.id, )

    def manage_clearFilteredSet(self,
                                filter_ids=[],
                                URL1=None,
                                REQUEST=None,
                                RESPONSE=None):
        """  clear a list of FilteredSets"""

        for filter_id in filter_ids:
            self.clearFilteredSet(filter_id)

        if RESPONSE:
            RESPONSE.redirect(URL1 + '/manage_workspace?'
                              'manage_tabs_message=FilteredSet(s)%20cleared')

    manage = manage_main = DTMLFile('dtml/manageTopicIndex', globals())
    manage_main._setName('manage_main')
    editFilteredSet = DTMLFile('dtml/editFilteredSet', globals())
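
A rough, self-contained sketch of how a TopicIndex-style query combines its
filtered sets, with plain IITreeSets standing in for FilteredSet objects;
the names filtered_sets and search_topic are illustrative only:

from BTrees.IIBTree import IITreeSet, union, intersection

filtered_sets = {
    'is_published': IITreeSet((1, 2, 3)),
    'is_news': IITreeSet((2, 3, 4)),
}

def search_topic(filter_ids, operator='or'):
    set_func = union if operator == 'or' else intersection
    res = None
    for fid in filter_ids:
        # BTrees set functions return the other operand when one side is None
        res = set_func(res, filtered_sets.get(fid))
    return res or IITreeSet()

print(list(search_topic(['is_published', 'is_news'], 'and')))  # [2, 3]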
Example #57
class Folder(Persistent, Contained):
    """The standard Zope Folder implementation."""

    implements(IContentContainer)

    def __init__(self):
        self.data = OOBTree()

    def keys(self):
        """Return a sequence-like object containing the names
           associated with the objects that appear in the folder
        """
        return self.data.keys()

    def __iter__(self):
        return iter(self.data.keys())

    def values(self):
        """Return a sequence-like object containing the objects that
           appear in the folder.
        """
        return self.data.values()

    def items(self):
        """Return a sequence-like object containing tuples of the form
           (name, object) for the objects that appear in the folder.
        """
        return self.data.items()

    def __getitem__(self, name):
        """Return the named object, or raise ``KeyError`` if the object
           is not found.
        """
        return self.data[name]

    def get(self, name, default=None):
        """Return the named object, or the value of the `default`
           argument if the object is not found.
        """
        return self.data.get(name, default)

    def __contains__(self, name):
        """Return true if the named object appears in the folder."""
        return self.data.has_key(name)

    def __len__(self):
        """Return the number of objects in the folder."""
        return len(self.data)

    def __setitem__(self, name, object):
        """Add the given object to the folder under the given name."""

        if not (isinstance(name, str) or isinstance(name, unicode)):
            raise TypeError("Name must be a string rather than a %s" %
                            name.__class__.__name__)
        try:
            unicode(name)
        except UnicodeError:
            raise TypeError("Non-unicode names must be 7-bit-ascii only")
        if not name:
            raise TypeError("Name must not be empty")

        if name in self.data:
            raise KeyError("name, %s, is already in use" % name)

        setitem(self, self.data.__setitem__, name, object)

    def __delitem__(self, name):
        """Delete the named object from the folder. Raises a KeyError
           if the object is not found."""
        uncontained(self.data[name], self, name)
        del self.data[name]
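
The Folder above is essentially a thin, event-firing wrapper around an
OOBTree; a minimal sketch of the underlying mapping protocol (without the
containment events) looks like this:

from BTrees.OOBTree import OOBTree

data = OOBTree()
data['index.html'] = object()
data['about'] = object()

print(sorted(data.keys()))  # ['about', 'index.html']
print('about' in data)      # True
del data['about']
print(len(data))            # 1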
Example #58
class BTreeFolder2Base(Persistent):
    """Base for BTree-based folders.
    """

    security = ClassSecurityInfo()

    manage_options = (({
        'label': 'Contents',
        'action': 'manage_main'
    }, ) + Folder.manage_options[1:])

    security.declareProtected(view_management_screens, 'manage_main')
    manage_main = DTMLFile('contents', globals())

    _tree = None  # OOBTree: { id -> object }
    _count = None  # A BTrees.Length
    _v_nextid = 0  # The integer component of the next generated ID
    _mt_index = None  # OOBTree: { meta_type -> OIBTree: { id -> 1 } }
    title = ''

    # superValues() looks for the _objects attribute, but the implementation
    # would be inefficient, so superValues() support is disabled.
    _objects = ()

    def __init__(self, id=None):
        if id is not None:
            self.id = id
        self._initBTrees()

    def _initBTrees(self):
        self._tree = OOBTree()
        self._count = Length()
        self._mt_index = OOBTree()

    def _populateFromFolder(self, source):
        """Fill this folder with the contents of another folder.
        """
        for name in source.objectIds():
            value = source._getOb(name, None)
            if value is not None:
                self._setOb(name, aq_base(value))

    security.declareProtected(view_management_screens, 'manage_fixCount')

    def manage_fixCount(self):
        """Calls self._fixCount() and reports the result as text.
        """
        old, new = self._fixCount()
        path = '/'.join(self.getPhysicalPath())
        if old == new:
            return "No count mismatch detected in BTreeFolder2 at %s." % path
        else:
            return ("Fixed count mismatch in BTreeFolder2 at %s. "
                    "Count was %d; corrected to %d" % (path, old, new))

    def _fixCount(self):
        """Checks if the value of self._count disagrees with
        len(self.objectIds()). If so, corrects self._count. Returns the
        old and new count values. If old==new, no correction was
        performed.
        """
        old = self._count()
        new = len(self.objectIds())
        if old != new:
            self._count.set(new)
        return old, new

    security.declareProtected(view_management_screens, 'manage_cleanup')

    def manage_cleanup(self):
        """Calls self._cleanup() and reports the result as text.
        """
        v = self._cleanup()
        path = '/'.join(self.getPhysicalPath())
        if v:
            return "No damage detected in BTreeFolder2 at %s." % path
        else:
            return ("Fixed BTreeFolder2 at %s.  "
                    "See the log for more details." % path)

    def _cleanup(self):
        """Cleans up errors in the BTrees.

        Certain ZODB bugs have caused BTrees to become slightly insane.
        Fortunately, there is a way to clean up damaged BTrees that
        always seems to work: make a new BTree containing the items()
        of the old one.

        Returns 1 if no damage was detected, or 0 if damage was
        detected and fixed.
        """
        from BTrees.check import check
        path = '/'.join(self.getPhysicalPath())
        try:
            check(self._tree)
            for key in self._tree.keys():
                if key not in self._tree:
                    raise AssertionError("Missing value for key: %s" %
                                         repr(key))
            check(self._mt_index)
            for key, value in self._mt_index.items():
                if (key not in self._mt_index
                        or self._mt_index[key] is not value):
                    raise AssertionError(
                        "Missing or incorrect meta_type index: %s" % repr(key))
                check(value)
                for k in value.keys():
                    if k not in value:
                        raise AssertionError(
                            "Missing values for meta_type index: %s" %
                            repr(key))
            return 1
        except AssertionError:
            LOG.warn('Detected damage to %s. Fixing now.' % path,
                     exc_info=sys.exc_info())
            try:
                self._tree = OOBTree(self._tree)
                mt_index = OOBTree()
                for key, value in self._mt_index.items():
                    mt_index[key] = OIBTree(value)
                self._mt_index = mt_index
            except:
                LOG.error('Failed to fix %s.' % path, exc_info=sys.exc_info())
                raise
            else:
                LOG.info('Fixed %s.' % path)
            return 0

    def _getOb(self, id, default=_marker):
        """Return the named object from the folder.
        """
        tree = self._tree
        if default is _marker:
            ob = tree[id]
            return ob.__of__(self)
        else:
            ob = tree.get(id, _marker)
            if ob is _marker:
                return default
            else:
                return ob.__of__(self)

    security.declareProtected(access_contents_information, 'get')

    def get(self, name, default=None):
        return self._getOb(name, default)

    def __getitem__(self, name):
        return self._getOb(name)

    def __getattr__(self, name):
        # Boo hoo hoo!  Zope 2 prefers implicit acquisition over traversal
        # to subitems, and __bobo_traverse__ hooks don't work with
        # restrictedTraverse() unless __getattr__() is also present.
        # Oh well.
        res = self._tree.get(name)
        if res is None:
            raise AttributeError(name)
        return res

    def _setOb(self, id, object):
        """Store the named object in the folder.
        """
        tree = self._tree
        if id in tree:
            raise KeyError('There is already an item named "%s".' % id)
        tree[id] = object
        self._count.change(1)
        # Update the meta type index.
        mti = self._mt_index
        meta_type = getattr(object, 'meta_type', None)
        if meta_type is not None:
            ids = mti.get(meta_type, None)
            if ids is None:
                ids = OIBTree()
                mti[meta_type] = ids
            ids[id] = 1

    def _delOb(self, id):
        """Remove the named object from the folder.
        """
        tree = self._tree
        meta_type = getattr(tree[id], 'meta_type', None)
        del tree[id]
        self._count.change(-1)
        # Update the meta type index.
        if meta_type is not None:
            mti = self._mt_index
            ids = mti.get(meta_type, None)
            if ids is not None and id in ids:
                del ids[id]
                if not ids:
                    # Removed the last object of this meta_type.
                    # Prune the index.
                    del mti[meta_type]

    security.declareProtected(view_management_screens, 'getBatchObjectListing')

    def getBatchObjectListing(self, REQUEST=None):
        """Return a structure for a page template to show the list of objects.
        """
        if REQUEST is None:
            REQUEST = {}
        pref_rows = int(REQUEST.get('dtpref_rows', 20))
        b_start = int(REQUEST.get('b_start', 1))
        b_count = int(REQUEST.get('b_count', 1000))
        b_end = b_start + b_count - 1
        url = self.absolute_url() + '/manage_main'
        idlist = self.objectIds()  # Pre-sorted.
        count = self.objectCount()

        if b_end < count:
            next_url = url + '?b_start=%d' % (b_start + b_count)
        else:
            b_end = count
            next_url = ''

        if b_start > 1:
            prev_url = url + '?b_start=%d' % max(b_start - b_count, 1)
        else:
            prev_url = ''

        formatted = []
        formatted.append(listtext0 % pref_rows)
        for i in range(b_start - 1, b_end):
            optID = escape(idlist[i])
            formatted.append(listtext1 % (escape(optID, quote=1), optID))
        formatted.append(listtext2)
        return {
            'b_start': b_start,
            'b_end': b_end,
            'prev_batch_url': prev_url,
            'next_batch_url': next_url,
            'formatted_list': ''.join(formatted)
        }

    security.declareProtected(view_management_screens,
                              'manage_object_workspace')

    def manage_object_workspace(self, ids=(), REQUEST=None):
        '''Redirects to the workspace of the first object in
        the list.'''
        if ids and REQUEST is not None:
            REQUEST.RESPONSE.redirect('%s/%s/manage_workspace' %
                                      (self.absolute_url(), quote(ids[0])))
        else:
            return self.manage_main(self, REQUEST)

    security.declareProtected(access_contents_information, 'tpValues')

    def tpValues(self):
        """Ensures the items don't show up in the left pane.
        """
        return ()

    security.declareProtected(access_contents_information, 'objectCount')

    def objectCount(self):
        """Returns the number of items in the folder."""
        return self._count()

    def __len__(self):
        return self.objectCount()

    def __nonzero__(self):
        return True

    security.declareProtected(access_contents_information, 'has_key')

    def has_key(self, id):
        """Indicates whether the folder has an item by ID.
        """
        return id in self._tree

    # backward compatibility
    hasObject = has_key

    security.declareProtected(access_contents_information, 'objectIds')

    def objectIds(self, spec=None):
        # Returns a list of subobject ids of the current object.
        # If 'spec' is specified, returns objects whose meta_type
        # matches 'spec'.

        if spec is None:
            return self._tree.keys()

        if isinstance(spec, str):
            spec = [spec]

        set = None
        mti = self._mt_index
        for meta_type in spec:
            ids = mti.get(meta_type, None)
            if ids is not None:
                set = union(set, ids)
        if set is None:
            return ()
        else:
            return set.keys()

    security.declareProtected(access_contents_information, 'keys')

    def keys(self):
        return self._tree.keys()

    def __contains__(self, name):
        return name in self._tree

    def __iter__(self):
        return iter(self.objectIds())

    security.declareProtected(access_contents_information, 'objectValues')

    def objectValues(self, spec=None):
        # Returns a list of actual subobjects of the current object.
        # If 'spec' is specified, returns only objects whose meta_type
        # match 'spec'.
        if spec is None:
            return LazyMap(self._getOb, self._tree.keys())
        return LazyMap(self._getOb, self.objectIds(spec))

    security.declareProtected(access_contents_information, 'values')

    def values(self):
        return LazyMap(self._getOb, self._tree.keys())

    security.declareProtected(access_contents_information, 'objectItems')

    def objectItems(self, spec=None):
        # Returns a list of (id, subobject) tuples of the current object.
        # If 'spec' is specified, returns only objects whose meta_type match
        # 'spec'
        if spec is None:
            return LazyMap(lambda id, _getOb=self._getOb: (id, _getOb(id)),
                           self._tree.keys())
        return LazyMap(lambda id, _getOb=self._getOb: (id, _getOb(id)),
                       self.objectIds(spec))

    security.declareProtected(access_contents_information, 'items')

    def items(self):
        return LazyMap(lambda id, _getOb=self._getOb: (id, _getOb(id)),
                       self._tree.keys())

    security.declareProtected(access_contents_information, 'objectMap')

    def objectMap(self):
        # Returns a tuple of mappings containing subobject meta-data.
        return LazyMap(
            lambda (k, v): {
                'id': k,
                'meta_type': getattr(v, 'meta_type', None)
            }, self._tree.items(), self._count())

    security.declareProtected(access_contents_information, 'objectIds_d')

    def objectIds_d(self, t=None):
        ids = self.objectIds(t)
        res = {}
        for id in ids:
            res[id] = 1
        return res

    security.declareProtected(access_contents_information, 'objectMap_d')

    def objectMap_d(self, t=None):
        return self.objectMap()

    def _checkId(self, id, allow_dup=0):
        if not allow_dup and id in self:
            raise BadRequestException('The id "%s" is invalid--'
                                      'it is already in use.' % id)

    def _setObject(self,
                   id,
                   object,
                   roles=None,
                   user=None,
                   set_owner=1,
                   suppress_events=False):
        ob = object  # better name, keep original function signature
        v = self._checkId(id)
        if v is not None:
            id = v

        # If an object by the given id already exists, remove it.
        if id in self:
            self._delObject(id)

        if not suppress_events:
            notify(ObjectWillBeAddedEvent(ob, self, id))

        self._setOb(id, ob)
        ob = self._getOb(id)

        if set_owner:
            # TODO: eventify manage_fixupOwnershipAfterAdd
            # This will be called for a copy/clone, or a normal _setObject.
            ob.manage_fixupOwnershipAfterAdd()

            # Try to give user the local role "Owner", but only if
            # no local roles have been set on the object yet.
            if getattr(ob, '__ac_local_roles__', _marker) is None:
                user = getSecurityManager().getUser()
                if user is not None:
                    userid = user.getId()
                    if userid is not None:
                        ob.manage_setLocalRoles(userid, ['Owner'])

        if not suppress_events:
            notify(ObjectAddedEvent(ob, self, id))
            notifyContainerModified(self)

        compatibilityCall('manage_afterAdd', ob, ob, self)

        return id

    def __setitem__(self, key, value):
        return self._setObject(key, value)

    def _delObject(self, id, dp=1, suppress_events=False):
        ob = self._getOb(id)

        compatibilityCall('manage_beforeDelete', ob, ob, self)

        if not suppress_events:
            notify(ObjectWillBeRemovedEvent(ob, self, id))

        self._delOb(id)

        if not suppress_events:
            notify(ObjectRemovedEvent(ob, self, id))
            notifyContainerModified(self)

    def __delitem__(self, name):
        return self._delObject(id=name)

    # Utility for generating unique IDs.

    security.declareProtected(access_contents_information, 'generateId')

    def generateId(self, prefix='item', suffix='', rand_ceiling=999999999):
        """Returns an ID not used yet by this folder.

        The ID is unlikely to collide with other threads and clients.
        The IDs are sequential to optimize access to objects
        that are likely to have some relation.
        """
        tree = self._tree
        n = self._v_nextid
        attempt = 0
        while 1:
            if n % 4000 != 0 and n <= rand_ceiling:
                id = '%s%d%s' % (prefix, n, suffix)
                if id not in tree:
                    break
            n = randint(1, rand_ceiling)
            attempt = attempt + 1
            if attempt > MAX_UNIQUEID_ATTEMPTS:
                # Prevent denial of service
                raise ExhaustedUniqueIdsError
        self._v_nextid = n + 1
        return id
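
A simplified, stand-alone sketch of the generateId strategy above: try
sequential ids first (good locality in the BTree), fall back to random ids
when a slot is taken, and give up after too many attempts. The function name
generate_id and the next_hint parameter are illustrative, not the real API:

from random import randint

MAX_UNIQUEID_ATTEMPTS = 1000

def generate_id(existing, prefix='item', suffix='', rand_ceiling=999999999,
                next_hint=1):
    n = next_hint
    attempt = 0
    while True:
        if n % 4000 != 0 and n <= rand_ceiling:
            candidate = '%s%d%s' % (prefix, n, suffix)
            if candidate not in existing:
                return candidate, n + 1  # new id and the next hint to store
        n = randint(1, rand_ceiling)
        attempt += 1
        if attempt > MAX_UNIQUEID_ATTEMPTS:
            raise RuntimeError('exhausted unique id attempts')

used = {'item1', 'item2'}
new_id, hint = generate_id(used, next_hint=3)
print(new_id)  # 'item3'
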
class PasswordResetTool(UniqueObject, SimpleItem):
    """Provides a default implementation for a password reset scheme.

    From a 'forgotten password' template, you submit your username to
    a handler script that does a 'requestReset', and sends an email
    with an unguessable unique hash in a url as built by 'constructURL'
    to the user.

    The user visits that URL (the 'reset form') and enters their username,
    email address, and new password, and then tries to do a 'resetPassword'.
    """

    id = 'portal_password_reset'
    meta_type = 'Password Reset Tool'

    security = ClassSecurityInfo()

    def __init__(self):
        self._requests = OOBTree()

    # Internal attributes
    _user_check = True
    _timedelta = 7  # DAYS

    # Interface fulfillment
    @security.protected(ManagePortal)
    def requestReset(self, userid):
        """Ask the system to start the password reset procedure for
        user 'userid'.

        Returns a dictionary with the random string that must be
        used to reset the password in 'randomstring', the expiration date
        as a datetime in 'expires', and the userid (for convenience) in
        'userid'. Returns None if no such user.
        """
        if not self.getValidUser(userid):
            return None
        randomstring = self.uniqueString(userid)
        expiry = self.expirationDate()
        self._requests[randomstring] = (userid, expiry)

        # clear out untouched records more than 10 days old
        self.clearExpired(10)

        # this is a cheap sort of "automatic" clearing
        self._p_changed = 1

        retval = {}
        retval['randomstring'] = randomstring
        retval['expires'] = expiry
        retval['userid'] = userid
        return retval

    @security.public
    def resetPassword(self, userid, randomstring, password):
        """Set the password (in 'password') for the user who maps to
        the string in 'randomstring' iff the entered 'userid' is equal
        to the mapped userid. (This can be turned off with the
        'toggleUserCheck' method.)

        Note that this method will *not* check password validity: this
        must be done by the caller.

        Throws an 'ExpiredRequestError' if request is expired.
        Throws an 'InvalidRequestError' if no such record exists,
        or 'userid' is not in the record.
        """
        if get_member_by_login_name:
            found_member = get_member_by_login_name(self,
                                                    userid,
                                                    raise_exceptions=False)
            if found_member is not None:
                userid = found_member.getId()
        try:
            stored_user, expiry = self._requests[randomstring]
        except KeyError:
            raise InvalidRequestError

        if self.checkUser() and (userid != stored_user):
            raise InvalidRequestError
        if self.expired(expiry):
            del self._requests[randomstring]
            self._p_changed = 1
            raise ExpiredRequestError

        member = self.getValidUser(stored_user)
        if not member:
            raise InvalidRequestError

        # actually change password
        user = member.getUser()
        uf = getToolByName(self, 'acl_users')
        uf.userSetPassword(user.getUserId(), password)
        member.setMemberProperties(dict(must_change_password=0))

        # clean out the request
        del self._requests[randomstring]
        self._p_changed = 1

    @security.protected(ManagePortal)
    def setExpirationTimeout(self, timedelta):
        """Set the length of time a reset request will be valid in days.
        """
        self._timedelta = abs(timedelta)

    @security.public
    def getExpirationTimeout(self):
        """Get the length of time a reset request will be valid.
        """
        return self._timedelta

    @security.public
    def checkUser(self):
        """Returns a boolean representing the state of 'user check' as described
        in 'toggleUserCheck'. True means on, and is the default."""
        return self._user_check

    @security.public
    def verifyKey(self, key):
        """Verify a key. Raises an exception if the key is invalid or expired.
        """
        try:
            u, expiry = self._requests[key]
        except KeyError:
            raise InvalidRequestError

        if self.expired(expiry):
            raise ExpiredRequestError

        if not self.getValidUser(u):
            raise InvalidRequestError('No such user')

    @security.private
    def clearExpired(self, days=0):
        """Destroys all expired reset request records.
        Parameter controls how many days past expired it must be to disappear.
        """
        now = datetime.datetime.utcnow()
        for key, record in self._requests.items():
            stored_user, expiry = record
            if self.expired(expiry, now - datetime.timedelta(days=days)):
                del self._requests[key]
                self._p_changed = 1

    # customization points

    @security.private
    def uniqueString(self, userid):
        """Returns a string that is random and unguessable, or at
        least as close as possible.

        This is used by 'requestReset' to generate the auth
        string. Override if you wish different format.

        This implementation ignores userid and simply generates a
        UUID. That parameter is for convenience of extenders, and
        will be passed properly in the default implementation.
        """
        uuid_generator = getUtility(IUUIDGenerator)
        return uuid_generator()

    @security.private
    def expirationDate(self):
        """Returns a DateTime for exipiry of a request from the
        current time.

        This is used by housekeeping methods (like clearEpired)
        and stored in reset request records."""
        return (datetime.datetime.utcnow() +
                datetime.timedelta(days=self._timedelta))

    @security.private
    def getValidUser(self, userid):
        """Returns the member with 'userid' if available and None otherwise."""
        if get_member_by_login_name:
            registry = getUtility(IRegistry)
            settings = registry.forInterface(ISecuritySchema, prefix='plone')
            if settings.use_email_as_login:
                return get_member_by_login_name(self,
                                                userid,
                                                raise_exceptions=False)
        membertool = getToolByName(self, 'portal_membership')
        return membertool.getMemberById(userid)

    @security.private
    def expired(self, dt, now=None):
        """Tells whether a DateTime or timestamp 'datetime' is expired
        with regards to either 'now', if provided, or the current
        time."""
        if not now:
            now = datetime.datetime.utcnow()
        return now >= dt
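
A hedged, stand-alone sketch of the reset-request bookkeeping used by the
tool above: requests are keyed by an unguessable random string and map to
(userid, expiry); the helper names below are illustrative only:

import datetime
import uuid

_requests = {}  # randomstring -> (userid, expiry)
TIMEOUT_DAYS = 7

def request_reset(userid):
    randomstring = uuid.uuid4().hex
    expiry = datetime.datetime.utcnow() + datetime.timedelta(days=TIMEOUT_DAYS)
    _requests[randomstring] = (userid, expiry)
    return {'randomstring': randomstring, 'expires': expiry, 'userid': userid}

def verify_key(key, userid=None):
    try:
        stored_user, expiry = _requests[key]
    except KeyError:
        raise ValueError('invalid reset request')
    if datetime.datetime.utcnow() >= expiry:
        del _requests[key]
        raise ValueError('expired reset request')
    if userid is not None and userid != stored_user:
        raise ValueError('invalid reset request')
    return stored_user

token = request_reset('jane')
print(verify_key(token['randomstring'], 'jane'))  # 'jane'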
Example #60
class StructureIndex(persistent.Persistent):
    """Stores information about the relationships between objects.

    It allows you to determine the parent-child relationship between two
    objects without having to materialize the objects in memory.

    """

    zope.interface.implements(gocept.objectquery.interfaces.IStructureIndex)

    def __init__(self, root):
        self.paths = OOBTree()
        self.insert(None, root)

    def index(self, obj):
        recursive_unindex = []
        child_paths = []
        for child in IChildrenInspector(obj)():
            new_paths = self.insert(obj, child)
            loops = [path for path in new_paths if overlong_loop(path)]
            if not loops:
                recursive_unindex.extend(self.index(child))
            child_paths.extend(new_paths)

        # Remove paths not found anymore
        for path_set, path in self.paths_traversing_obj(obj):
            for candidate in child_paths:
                if path_starts_with(path, candidate):
                    # This is a valid path that is still referenced.
                    break
            else:
                # This path is not referenced anymore.
                path_set.remove(path)

        # Mark objects that have no paths anymore for unindexing
        local_unindex = []
        for candidate, path_set in self.paths.items():
            if len(path_set) > 0:
                continue
            local_unindex.append(candidate)
        # Delete buckets. XXX Maybe don't delete buckets. This might be
        # a conflict hotspot.
        for candidate in local_unindex:
            del self.paths[candidate]
        return (recursive_unindex +
                [obj._p_jar.get(candidate) for candidate in local_unindex])

    def paths_traversing_obj(self, obj):
        """List all paths that touch the given obj and traverse *past*
        it."""
        for path_set in self.paths.values():
            for path in list(path_set):
                if obj._p_oid in path[:-1]:
                    yield path_set, path

    def insert(self, parent, child):
        # Establish a new path to child_id for each path that leads to
        # parent_id.
        child_id = child._p_oid
        new_paths = []
        for parent_path in self.get_paths(parent):
            new_paths.append(parent_path + (child_id, ))
        if not self.paths.has_key(child_id):
            self.paths[child_id] = OOTreeSet()
        for path in new_paths:
            self.paths[child_id].insert(path)
        return new_paths

    def is_child(self, child, parent):
        """ Check if key1 is a direct successor of key2. """
        if not child or not parent:
            return True  # empty keys return True (see KCJoin)
        for elem1 in self.paths.get(child, []):
            for elem2 in self.paths.get(parent):
                if len(elem1) == len(elem2) + 1 and \
                    self._check_path(elem2, elem1):
                    return True
        return False

    def is_successor(self, child, parent):
        """ Check if key1 is a successor of key2. """
        for elem1 in self.paths.get(child):
            for elem2 in self.paths.get(parent):
                if self._check_path(elem2, elem1):
                    return True
        return False

    def get_paths(self, obj):
        """Return all paths that lead to the given object."""
        if obj is None:
            # `None` is a special parent for the root object causing the
            # root path to be expressed as get_paths(None) + (root._p_oid)
            return [()]
        try:
            return self.paths[obj._p_oid]
        except KeyError:
            return []

    def _check_path(self, path1, path2):
        """ Check if path1 is reachable by path2. """
        if len(path1) > len(path2):
            return False
        for i in range(len(path1)):
            if path1[i] != path2[i]:
                return False
        return True
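
An illustrative, stand-alone sketch of the path bookkeeping in StructureIndex,
with string ids standing in for persistent oids; the names below mirror the
methods above but this is not the real class:

paths = {
    'root':   {('root',)},
    'folder': {('root', 'folder')},
    'doc':    {('root', 'folder', 'doc')},
}

def is_successor(child, parent):
    # child is reachable from parent if some parent path is a prefix
    # of some child path
    return any(p == c[:len(p)]
               for c in paths.get(child, ())
               for p in paths.get(parent, ()))

def is_child(child, parent):
    # direct child: the child path is exactly one element longer
    return any(len(c) == len(p) + 1 and p == c[:len(p)]
               for c in paths.get(child, ())
               for p in paths.get(parent, ()))

print(is_child('doc', 'folder'))    # True
print(is_successor('doc', 'root'))  # True
print(is_child('doc', 'root'))      # False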