Esempio n. 1
0
class DefinitionBase(SignatureSchemaContext):
    """Base implementation class for form definitions, groups"""
    def __init__(self, content_base):
        # content_base: the content base class whose __getattr__/__getitem__
        # this object delegates to (see below) -- stored, not subclassed.
        self.content_base = content_base
        SignatureSchemaContext.__init__(self)  # sets self.signature=None
        # Ordered log of schema signatures; list position defines version.
        self.signature_history = PersistentList()

    def schema_version(self, signature):
        # Return the one-indexed version number of *signature* within the
        # recorded history, or -1 when the signature was never recorded.
        signature = str(signature.strip())
        if signature not in self.signature_history:
            return -1
        return self.signature_history.index(signature) + 1  # one-indexed

    def __getattr__(self, name):
        """Hack to get acquisition and Python property self.schema to work"""
        if name == 'schema':
            # Invoke the class-level property descriptor explicitly so the
            # 'schema' property wins over acquisition for that name.
            return self.__class__.schema.__get__(self)
        # fall back to base class(es) __getattr__ from DexterityContent
        return self.content_base.__getattr__(self, name)

    def __getitem__(self, name):
        """low-tech traversal hook"""
        if name == 'edit_schema':
            title = u'Form schema: %s' % self.title
            temp_schema = copy_schema(self.schema)  # edit copy, not in-place!
            # Wrap the copy in a SchemaContext acquisition-bound to self
            # so the returned context traverses/renders with this parent.
            schema_context = SchemaContext(temp_schema, self.REQUEST, name,
                                           title).__of__(self)
            return schema_context
        return self.content_base.__getitem__(self, name)
Esempio n. 2
0
class GenericContainer(grok.Container):
    """Ordered grok container.

    Children are stored under stringified sequence numbers and
    ``self.item_order`` preserves insertion order of those numbers.
    """

    def __init__(self, name, id=None):
        super(GenericContainer, self).__init__()
        self.name = name
        self.id = id
        self.item_count = 0  # monotonically increasing key counter
        self.item_order = PersistentList()

    def items(self):
        """Return the contained items in insertion order."""
        return [self[unicode(key)] for key in self.item_order]

    def inverse_items(self):
        """Return the contained items in reverse insertion order."""
        t = self.items()
        t.reverse()
        return t

    def delete(self, id):
        """Remove *id* from the ordering (raises ValueError if absent).

        NOTE(review): only the ordering entry is dropped; the stored
        child itself stays in the container -- confirm this is intended.
        """
        # equivalent to pop(index(...)) in the original, minus one lookup
        self.item_order.remove(int(id))

    def add_item(self, item):
        """Store *item* under the next sequence number."""
        self[unicode(self.item_count)] = item
        self.item_order.append(self.item_count)
        self.item_count += 1

    def max(self):
        """Return the most recently added item, or None when empty.

        Fixed: the original guarded with ``items() is not None`` which is
        always true (items() returns a list), so an empty container raised
        IndexError instead of returning None.
        """
        items = self.items()
        if items:
            return items[-1]

    def min(self):
        """Return the oldest item, or None when empty.

        Fixed: the original tested the bound method ``self.items`` (always
        truthy) instead of calling it, and then indexed an empty list.
        """
        items = self.items()
        if items:
            return items[0]
Esempio n. 3
0
class DefinitionBase(SignatureSchemaContext):
    """Base implementation class for form definitions, groups"""

    def __init__(self, content_base):
        self.content_base = content_base
        # SignatureSchemaContext.__init__ initializes self.signature to None
        SignatureSchemaContext.__init__(self)
        self.signature_history = PersistentList()

    def schema_version(self, signature):
        """Return the one-indexed position of *signature* in the recorded
        history, or -1 when the signature was never recorded."""
        key = str(signature.strip())
        try:
            return self.signature_history.index(key) + 1  # one-indexed
        except ValueError:
            return -1

    def __getattr__(self, name):
        """Hack to get acquisition and the Python property self.schema
        to cooperate."""
        if name != 'schema':
            # anything else falls back to the content base class hierarchy
            return self.content_base.__getattr__(self, name)
        # invoke the class-level property descriptor explicitly
        return self.__class__.schema.__get__(self)

    def __getitem__(self, name):
        """low-tech traversal hook"""
        if name != 'edit_schema':
            return self.content_base.__getitem__(self, name)
        # hand back an editing context wrapping a *copy* of the schema so
        # edits are not applied in place, acquisition-bound to self
        label = u'Form schema: %s' % self.title
        working = copy_schema(self.schema)
        return SchemaContext(working, self.REQUEST, name, label).__of__(self)
Esempio n. 4
0
class HasMessages:
    """Mixin giving an object a persistent message inbox with a trash."""

    __persistenceVersion = 1

    _purge_age = timedelta(days=7)      # days messages stay in trash
    _trash_age = timedelta(days=30)     # days messages stay in inbox

    def __init__(self):
        self.__message_list = PersistentList()

    def upgradeToVersion1(self):
        # schema-upgrade hook: re-wrap stored messages as a PersistentList
        self.__message_list = PersistentList(self.__message_list)

    def has_messages(self):
        """True when at least one message is stored."""
        return len(self.__message_list) > 0

    def _messages_with_status(self, status):
        # internal helper: stored messages whose status equals *status*
        return [m for m in self.__message_list if m.status == status]

    def new_messages(self):
        """Messages not yet read."""
        return self._messages_with_status('new')

    def old_messages(self):
        """Messages already read."""
        return self._messages_with_status('read')

    def deleted_messages(self):
        """Messages sitting in the trash."""
        return self._messages_with_status('deleted')

    def add_message(self, msg):
        """Append *msg* to the message list."""
        self.__message_list.append(msg)

    def trash_old_messages(self):
        """Call periodically to move old messages, read and unread, to trash."""
        cutoff = datetime.utcnow() - self._trash_age
        for m in self.__message_list:
            if m.status != 'deleted' and m.date < cutoff:
                m.delete()

    def purge_old_messages(self):
        """Call periodically to permanently delete old messages out of trash."""
        # total lifetime = time in inbox + time in trash
        cutoff = datetime.utcnow() - self._trash_age - self._purge_age
        for m in self.deleted_messages():
            if (m.date_opened or m.date) < cutoff:
                self.__message_list.remove(m)

    def message_index(self, msg):
        """Position of *msg* in the message list (ValueError if absent)."""
        return self.__message_list.index(msg)

    def get_message(self, index):
        """Message stored at *index*."""
        return self.__message_list[index]
Esempio n. 5
0
    def checkTheWorld(self):
        """Exhaustively exercise PersistentList's list protocol.

        Relies on the fixtures l0/l1/l2 (plain lists of length 0/1/2)
        defined at module level outside this method.  The sequence of
        operations below is order-dependent: u, uu2 etc. are reused and
        reassigned between sections, so statements must not be reordered.
        """
        # Test constructors
        u = PersistentList()
        u0 = PersistentList(l0)
        u1 = PersistentList(l1)
        u2 = PersistentList(l2)

        uu = PersistentList(u)
        uu0 = PersistentList(u0)
        uu1 = PersistentList(u1)
        uu2 = PersistentList(u2)

        v = PersistentList(tuple(u))

        # Minimal sequence type: construction must work from any object
        # supporting __len__ and __getitem__, not just real lists.
        class OtherList:
            def __init__(self, initlist):
                self.__data = initlist

            def __len__(self):
                return len(self.__data)

            def __getitem__(self, i):
                return self.__data[i]

        v0 = PersistentList(OtherList(u0))
        vv = PersistentList("this is also a sequence")

        # Test __repr__
        eq = self.assertEqual

        eq(str(u0), str(l0), "str(u0) == str(l0)")
        eq(repr(u1), repr(l1), "repr(u1) == repr(l1)")
        eq(repr(u2), repr(l2), "repr(u2) == repr(l2)")

        # Test __cmp__ and __len__

        # Py3: No cmp() or __cmp__ anymore.
        if PY2:

            def mycmp(a, b):
                # normalize cmp() to exactly -1/0/1
                r = cmp(a, b)
                if r < 0: return -1
                if r > 0: return 1
                return r

            all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2]
            for a in all:
                for b in all:
                    eq(mycmp(a, b), mycmp(len(a), len(b)),
                       "mycmp(a, b) == mycmp(len(a), len(b))")

        # Test __getitem__

        for i in range(len(u2)):
            eq(u2[i], i, "u2[i] == i")

        # Test __setitem__

        uu2[0] = 0
        uu2[1] = 100
        try:
            uu2[2] = 200
        except IndexError:
            pass
        else:
            self.fail("uu2[2] shouldn't be assignable")

        # Test __delitem__

        del uu2[1]
        del uu2[0]
        try:
            del uu2[0]
        except IndexError:
            pass
        else:
            self.fail("uu2[0] shouldn't be deletable")

        # Test __getslice__

        for i in range(-3, 4):
            eq(u2[:i], l2[:i], "u2[:i] == l2[:i]")
            eq(u2[i:], l2[i:], "u2[i:] == l2[i:]")
            for j in range(-3, 4):
                eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]")

        # Test __setslice__

        for i in range(-3, 4):
            u2[:i] = l2[:i]
            eq(u2, l2, "u2 == l2")
            u2[i:] = l2[i:]
            eq(u2, l2, "u2 == l2")
            for j in range(-3, 4):
                u2[i:j] = l2[i:j]
                eq(u2, l2, "u2 == l2")

        uu2 = u2[:]
        uu2[:0] = [-2, -1]
        eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]")
        uu2[0:] = []
        eq(uu2, [], "uu2 == []")

        # Test __contains__
        for i in u2:
            self.assertTrue(i in u2, "i in u2")
        for i in min(u2) - 1, max(u2) + 1:
            self.assertTrue(i not in u2, "i not in u2")

        # Test __delslice__

        uu2 = u2[:]
        del uu2[1:2]
        del uu2[0:1]
        eq(uu2, [], "uu2 == []")

        uu2 = u2[:]
        del uu2[1:]
        del uu2[:1]
        eq(uu2, [], "uu2 == []")

        # Test __add__, __radd__, __mul__ and __rmul__

        #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1")
        self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2")
        #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]")
        self.assertTrue(u2 == u2 * 1 == 1 * u2, "u2 == u2*1 == 1*u2")
        self.assertTrue(u2 + u2 == u2 * 2 == 2 * u2, "u2+u2 == u2*2 == 2*u2")
        self.assertTrue(u2 + u2 + u2 == u2 * 3 == 3 * u2,
                        "u2+u2+u2 == u2*3 == 3*u2")

        # Test append

        u = u1[:]
        u.append(1)
        eq(u, u2, "u == u2")

        # Test insert

        u = u2[:]
        u.insert(0, -1)
        eq(u, [-1, 0, 1], "u == [-1, 0, 1]")

        # Test pop

        u = PersistentList([0, -1, 1])
        u.pop()
        eq(u, [0, -1], "u == [0, -1]")
        u.pop(0)
        eq(u, [-1], "u == [-1]")

        # Test remove

        u = u2[:]
        u.remove(1)
        eq(u, u1, "u == u1")

        # Test count
        u = u2 * 3
        eq(u.count(0), 3, "u.count(0) == 3")
        eq(u.count(1), 3, "u.count(1) == 3")
        eq(u.count(2), 0, "u.count(2) == 0")

        # Test index

        eq(u2.index(0), 0, "u2.index(0) == 0")
        eq(u2.index(1), 1, "u2.index(1) == 1")
        try:
            u2.index(2)
        except ValueError:
            pass
        else:
            self.fail("expected ValueError")

        # Test reverse

        u = u2[:]
        u.reverse()
        eq(u, [1, 0], "u == [1, 0]")
        u.reverse()
        eq(u, u2, "u == u2")

        # Test sort

        u = PersistentList([1, 0])
        u.sort()
        eq(u, u2, "u == u2")

        # Test extend

        u = u1[:]
        u.extend(u2)
        eq(u, u1 + u2, "u == u1 + u2")
Esempio n. 6
0
class ToManyRelationship(ToManyRelationshipBase):
    """
    ToManyRelationship manages the ToMany side of a bi-directional relation
    between two objects.  It does not return values for any of the object*
    calls defined on ObjectManager so that Zope can still work with its
    containment assumptions.  It provides object*All calls that return
    its objects in the same way that ObjectManager does.

    Related references are maintained in a list.
    """

    __pychecker__ = 'no-override'

    meta_type = "ToManyRelationship"

    security = ClassSecurityInfo()

    def __init__(self, id):
        """ToManyRelationships use an array to store related objects"""
        self.id = id
        self._objects = PersistentList()


    def __call__(self):
        """when we are called return our related objects in our aq context"""
        return self.objectValuesAll()


    def hasobject(self, obj):
        """Return the stored reference for obj, or None if not related."""
        try:
            idx = self._objects.index(obj)
            return self._objects[idx]
        except ValueError:
            return None


    def manage_pasteObjects(self, cb_copy_data=None, REQUEST=None):
        """ToManyRelationships link instead of pasting"""
        return self.manage_linkObjects(cb_copy_data=cb_copy_data,
                                        REQUEST=REQUEST)


    def _add(self, obj):
        """add an object to one side of this toMany relationship"""
        if obj in self._objects:
            raise RelationshipExistsError
        # strip acquisition wrappers before storing the reference
        self._objects.append(aq_base(obj))
        self.__primary_parent__._p_changed = True


    def _remove(self, obj=None, suppress_events=False):
        """remove object from our side of a relationship;
        with no object, remove all objects"""
        if obj:
            try:
                self._objects.remove(obj)
            except ValueError:
                raise ObjectNotFound(
                    "object %s not found on relation %s" % (
                        obj.getPrimaryId(), self.getPrimaryId()))
        else:
            self._objects = PersistentList()
        self.__primary_parent__._p_changed = True


    def _remoteRemove(self, obj=None):
        """remove an object from the far side of this relationship
        if no object is passed in remove all objects"""
        if obj:
            if obj not in self._objects:
                raise ObjectNotFound("object %s not found on relation %s" % (
                            obj.getPrimaryId(), self.getPrimaryId()))
            objs = [obj]
        else:
            objs = self.objectValuesAll()
        remoteName = self.remoteName()
        for obj in objs:
            rel = getattr(obj, remoteName)
            try:
                rel._remove(self.__primary_parent__)
            except ObjectNotFound:
                # far side is already inconsistent; log it, don't fail
                message = log_tb(sys.exc_info())
                log.error('Remote remove failed. Run "zenchkrels -r -x1". ' + message)


    def _setObject(self, id, object, roles=None, user=None, set_owner=1):
        """Set an object onto a ToMany by calling addRelation"""
        unused(id, roles, user, set_owner)
        self.addRelation(object)


    def _delObject(self, id, dp=1, suppress_events=False):
        """
        Delete object by its absolute id (ie /zport/dmd/bla/bla)
        (this is sent out in the object*All API)
        """
        obj = getObjByPath(self, id)
        self.removeRelation(obj, suppress_events=suppress_events)


    def _getOb(self, id, default=zenmarker):
        """
        Return object based on its primaryId. plain id will not work!!!
        """
        # list comprehension (not filter()) so len() keeps working on Py3
        objs = [x for x in self._objects if x.getPrimaryId() == id]
        if len(objs) == 1:
            return objs[0].__of__(self)
        if default != zenmarker:
            return default
        raise AttributeError(id)


    def objectIdsAll(self):
        """
        Return object ids as their absolute primaryId.
        """
        return [obj.getPrimaryId() for obj in self._objects]


    def objectIds(self, spec=None):
        """
        ToManyRelationship doesn't publish objectIds to prevent
        zope recursion problems.
        """
        unused(spec)
        return []


    security.declareProtected('View', 'objectValuesAll')
    def objectValuesAll(self):
        """return all related object values"""
        return list(self.objectValuesGen())


    def objectValuesGen(self):
        """Generator that returns all related objects in our aq context."""
        return (obj.__of__(self) for obj in self._objects)


    def objectValues(self, spec=None):
        """
        ToManyRelationship doesn't publish objectValues to prevent
        zope recursion problems.
        """
        unused(spec)
        return []


    def objectItemsAll(self):
        """
        Return object items where key is primaryId.
        """
        return [(obj.getPrimaryId(), obj) for obj in self._objects]


    def objectItems(self, spec=None):
        """
        ToManyRelationship doesn't publish objectItems to prevent
        zope recursion problems.
        """
        unused(spec)
        return []


    def _getCopy(self, container):
        """
        create copy and link remote objects if remote side is TO_MANY
        """
        rel = self.__class__(self.id)
        rel.__primary_parent__ = container
        rel = rel.__of__(container)
        norelcopy = getattr(self, 'zNoRelationshipCopy', [])
        if self.id in norelcopy:
            return rel
        if self.remoteTypeName() == "ToMany":
            for robj in self.objectValuesAll():
                rel.addRelation(robj)
        return rel


    def exportXml(self, ofile, ignorerels=[]):
        """Write an xml representation of a ToManyRelationship to ofile.
        <tomany id='interfaces'>
            <link>/Systems/OOL/Mail</link>
        </tomany>

        NOTE: ignorerels is unused here (kept for API compatibility with
        sibling relationship classes); it is never mutated, so the mutable
        default is harmless.
        """
        if self.countObjects() == 0:
            return
        ofile.write("<tomany id='%s'>\n" % self.id)
        for id in self.objectIdsAll():
            ofile.write("<link objid='%s'/>\n" % id)
        ofile.write("</tomany>\n")


    def all_meta_types(self, interfaces=None):
        """Return empty list not allowed to add objects to a ToManyRelation"""
        return []


    def convertToPersistentList(self):
        # migration hook: ensure _objects is a PersistentList
        self._objects = PersistentList(self._objects)


    def checkObjectRelation(self, obj, remoteName, parentObject, repair):
        """Check (and optionally repair) a single related object.

        Returns True when the object was removed from this relation.
        """
        deleted = False
        try:
            ppath = obj.getPrimaryPath()
            getObjByPath(self, ppath)
        except (KeyError, NotFound):
            log.error("object %s in relation %s has been deleted " \
                         "from its primary path",
                         obj.getPrimaryId(), self.getPrimaryId())
            if repair:
                log.warn("removing object %s from relation %s",
                         obj.getPrimaryId(), self.getPrimaryId())
                self._objects.remove(obj)
                self.__primary_parent__._p_changed = True
                deleted = True

        if not deleted:
            rrel = getattr(obj, remoteName)
            if not rrel.hasobject(parentObject):
                log.error("remote relation %s doesn't point back to %s",
                                rrel.getPrimaryId(), self.getPrimaryId())
                if repair:
                    log.warn("reconnecting relation %s to relation %s",
                            rrel.getPrimaryId(), self.getPrimaryId())
                    rrel._add(parentObject)
        return deleted


    def checkRelation(self, repair=False):
        """Check to make sure that relationship bidirectionality is ok.
        """
        if len(self._objects):
            log.debug("checking relation: %s", self.id)

        # look for objects that don't point back to us
        # or who should no longer exist in the database.
        # Iterate over a snapshot: checkObjectRelation(repair=True) may
        # remove entries from self._objects, and mutating the list while
        # iterating it would silently skip elements.
        rname = self.remoteName()
        parobj = self.getPrimaryParent()
        for obj in list(self._objects):
            self.checkObjectRelation(obj, rname, parobj, repair)

        # find duplicate objects
        keycount = {}
        for obj in self._objects:
            key = obj.getPrimaryId()
            keycount[key] = keycount.get(key, 0) + 1
        # Remove duplicate objects or objects that don't exist
        for key, val in keycount.items():
            if val > 1:
                log.critical("obj:%s rel:%s dup found obj:%s count:%s",
                             self.getPrimaryId(), self.id, key, val)
                if repair:
                    log.critical("repair key %s", key)
                    # rebuild as a PersistentList -- the original assigned a
                    # plain list here, silently breaking ZODB change tracking
                    self._objects = PersistentList(
                        o for o in self._objects if o.getPrimaryId() != key)
                    try:
                        obj = self.getObjByPath(key)
                        self._objects.append(obj)
                    except KeyError:
                        log.critical("obj %s not found in database", key)
Esempio n. 7
0
class RecordContainer(Persistent):
    """
    Base/default record container uses PersistentDict for entry storage
    and PersistentList to store ordered keys.  This base container class
    does not advocate one place of storage for the container in a ZODB
    over another, so subclass implementations may choose to implement a
    container within a placeful (e.g. OFS or CMF Content item) or placeless
    (local utility) storage context.  Only a placeless context is supported
    by direct users of this class (without subclassing).

    For a container with hundreds of items or more, consider using instead
    BTreeRecordContainer as an implementation or base class, as it should
    handle memory usage and insert performance much better for larger sets
    of records.

    Usage
    -----

    RecordContainer acts as CRUD controller for working with records.

    The RecordContainer is an addressible object in the system, either as a
    registered utility (or with a subclass as "contentish" (CMF) content).

    Records themselves are not content, but data that are possibly
    non-atomic elements of an atomic content item (if the container is
    implemented in a subclass of RecordContainer as contentish).

    Usage:
    ------

    We need a record container object:

    >>> from uu.record.base import Record, RecordContainer
    >>> container = RecordContainer()
    >>> from uu.record.interfaces import IRecordContainer
    >>> assert IRecordContainer.providedBy(container)

    Record containers have length and containment checks:

    >>> assert len(container) == 0
    >>> import uuid  # keys for entries are stringified UUIDs
    >>> randomuid = str(uuid.uuid4())
    >>> assert randomuid not in container
    >>> assert container.get(randomuid, None) is None

    And they have keys/values/items methods like a mapping:

    >>> assert container.keys() == ()
    >>> assert container.values() == ()
    >>> assert container.items() == ()  # of course, these are empty now.

    Before we add records to a container, we need to create them; there are
    two possible ways to do this:

    >>> from uu.record.base import Record
    >>> entry1 = Record()
    >>> entry2 = container.create()  # preferred factory

    Both factory mechanisms create an entry item with a record_uid attribute:

    >>> from uu.record.interfaces import IRecord
    >>> assert IRecord.providedBy(entry1)
    >>> assert IRecord.providedBy(entry2)
    >>> is_uuid = lambda u: isinstance(u, str) and len(u) == 36
    >>> assert is_uuid(entry1.record_uid)
    >>> assert is_uuid(entry2.record_uid)

    And, these are RFC 4122 UUIDs, so even randomly generated 128-bit ids
    have near zero chance of collision:

    >>> assert entry1.record_uid != entry2.record_uid
    >>> assert entry2.record_uid != randomuid

    The record objects provide plone.uuid.interfaces.IAttributeUUID as an
    alternative way to get the UUID value (string representation) by
    adapting to IUUID:

    >>> from zope.configuration import xmlconfig
    >>> import plone.uuid
    >>> c = xmlconfig.file('configure.zcml', plone.uuid)  # load registrations
    >>> from plone.uuid.interfaces import IUUID, IAttributeUUID
    >>> from zope.component import queryAdapter
    >>> assert IAttributeUUID.providedBy(entry1)
    >>> assert queryAdapter(entry1, IUUID) is not None
    >>> assert queryAdapter(entry1, IUUID) == entry1.record_uid

    Now when we have a parent context with a schema, the created entries will
    be signed with the schema and provide it.

    RecordContainer.create() is the preferred factory when processing data.
    This is because it can take a mapping of keys/values, and copy each
    field name/value onto object attributes -- if and only if the attribute
    in question matches a type whitelist and a name blacklist filter.

    >>> entry4 = container.create(data={'record_uid':randomuid})
    >>> assert entry4.record_uid == randomuid
    >>> entry5 = container.create(data={'count':5})
    >>> assert entry5.count == 5
    >>> entry6 = container.create(data={'_bad_name'    : True,
    ...                                  'count'        : 2,
    ...                                  'bad_value'    : lambda x: x })
    >>> assert not hasattr(entry6, '_bad_name')  # no leading underscores
    >>> assert entry6.count == 2
    >>> assert not hasattr(entry6, 'bad_value')  # function not copied!

    Of course, merely using the record container object as a factory for
    new records does not mean they are stored within (yet):

    >>> assert entry4.record_uid not in container
    >>> assert entry4.record_uid not in container.keys()

    Let's add an item:

    >>> container.add(entry4)

    There are two ways to check for containment, by either key or value:

    >>> assert entry4 in container
    >>> assert entry4.record_uid in container

    We can get records using a (limited, read) mapping-like interface:

    >>> assert len(container) == 1  # we just added the first entry
    >>> assert container.values()[0] is entry4
    >>> assert container.get(entry4.record_uid) is entry4
    >>> assert container[entry4.record_uid] is entry4

    We can deal with references to entries also NOT in the container:

    >>> import uuid
    >>> randomuid = str(uuid.uuid4())
    >>> assert randomuid not in container
    >>> assert container.get(str(uuid.uuid4()), None) is None
    >>> assert entry1.record_uid not in container

    And we can check containment on either an instance or a UID; checking on
    an instance is just a convenience that uses its UID (record_uid) field
    to check for actual containment:

    >>> assert entry4.record_uid in container
    >>> assert entry4 in container  # shortcut!

    However, it should be noted for good measure:

    >>> assert entry4 in container.values()
    >>> assert entry4.record_uid in container.keys()
    >>> assert entry4 not in container.keys()  # of course!
    >>> assert (entry4.record_uid, entry4) in container.items()

    We can modify a record contained directly; this is the most direct and
    low-level update interface for any entry:

    >>> _marker = object()
    >>> assert getattr(entry4, 'title', _marker) is _marker
    >>> entry4.title = u'Curious George'
    >>> assert container.get(entry4.record_uid).title == u'Curious George'

    We can add another record:

    >>> container.add(entry6)
    >>> assert entry6 in container
    >>> assert entry6.record_uid in container
    >>> assert len(container) == 2

    Keys, values, items are always ordered; since we added entry4, then
    entry6 previously, they will return in that order:

    >>> expected_order = (entry4, entry6)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    We can re-order this; let's move entry6 up to position 0 (first):

    >>> container.reorder(entry6, offset=0)
    >>> expected_order = (entry6, entry4)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    We can also re-order by UID instead of record/entry reference:

    >>> container.reorder(entry6.record_uid, offset=1)  # where it was before
    >>> expected_order = (entry4, entry6)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    And we can remove records from containment by UID or by reference (note,
    del(container[key]) uses __delitem__ since a container is a writable
    mapping):

    >>> del(container[entry6])
    >>> assert entry6 not in container
    >>> assert entry6.record_uid not in container
    >>> assert len(container) == 1
    >>> assert entry4 in container
    >>> del(container[entry4.record_uid])
    >>> assert entry4 not in container
    >>> assert len(container) == 0

    Earlier, direct update of objects was demonstrated: get an object and
    modify its properties.  This attribute-setting mechanism is the best
    low-level interface, but it does not (a) support a wholesale update
    from either a field dictionary/mapping nor another object providing
    IRecord needing its data to be copied; nor (b) support notification
    of zope.lifecycle object events.

    Given these needs, a high level interface for update exists, with the
    record object acting as a controller for updating contained entries.
    This provides for update via another entry (a field-by-field copy) or
    from a data dictionary/mapping.

    >>> newuid = str(uuid.uuid4())
    >>> data = {    'record_uid' : newuid,
    ...             'title'      : u'George',
    ...             'count'      : 9,
    ...        }
    >>> assert len(container) == 0  # empty, nothing in there yet!
    >>> assert newuid not in container

    Note, update() returns an entry; return value can be ignored if caller
    deems it not useful.

    >>> entry = container.update(data)
    >>> assert newuid in container  # update implies adding!
    >>> assert entry is container.get(newuid)
    >>> assert entry.title == data['title']
    >>> assert entry.count == data['count']

    Now, the entry we just modified was also added.  We can modify it again:

    >>> data = {    'record_uid' : newuid,
    ...             'title'      : u'Curious George',
    ...             'count'      : 2,
    ...        }
    >>> entry = container.update(data)
    >>> assert newuid in container     # same uid
    >>> entry.title
    u'Curious George'
    >>> entry.count
    2
    >>> assert len(container) == 1     # same length, nothing new was added.

    We could also create a stand-in entry for which data is copied to the
    permanent entry with the same UUID on update:

    >>> temp_entry = container.create()
    >>> temp_entry.record_uid = newuid      # overwrite with the uid of entry
    >>> temp_entry.title = u'Monkey jumping on the bed'
    >>> temp_entry.count = 0

    temp_entry is a stand-in which we will pass to update(), when we really
    intend to modify entry (they have the same UID):

    >>> real_entry = container.update(temp_entry)
    >>> assert container.get(newuid) is not temp_entry
    >>> assert container.get(newuid) is entry  # still the same object...
    >>> assert container.get(newuid) is real_entry
    >>> entry.title                             # ...but data is modified!
    u'Monkey jumping on the bed'
    >>> entry.count
    0
    >>> assert len(container) == 1     # same length, nothing new was added.


    JSON integration
    ----------------

    As a convenience, update_all() parses JSON into a data dict for use by
    update(), using the Python 2.6 json library (aka/was: simplejson):

    >>> party_form = RecordContainer()
    >>> entry = party_form.create()
    >>> party_form.add(entry)
    >>> data = {  # mock data we'll serialize to JSON
    ...     'record_uid': entry.record_uid,  # which record to update
    ...     'name'      : 'Me',
    ...     'birthday'  : u'77/06/01',
    ...     'party_time': u'11/06/05 12:00',
    ...     }
    >>> import json  # requires Python >= 2.6
    >>> data['name'] = 'Chunky monkey'
    >>> serialized = json.dumps([data,], indent=2)  # JSON array of one item...
    >>> print serialized  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    [
      {
        "party_time": "11/06/05 12:00",
        "birthday": "77/06/01",
        "name": "Chunky monkey",
        "record_uid": "..."
      }
    ]

    The JSON created above is useful enough for demonstration, despite being
    only a single-item list.

    >>> assert getattr(entry, 'name', _marker) is _marker  # before, no attr
    >>> party_form.update_all(serialized)
    >>> entry.name  # after update
    u'Chunky monkey'

    update_all() also takes a singular record, not just a JSON array:

    >>> data['name'] = 'Curious George'
    >>> serialized = json.dumps(data, indent=2)  # JSON object, not array.
    >>> print serialized  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    {
      "party_time": "11/06/05 12:00",
      "birthday": "77/06/01",
      "name": "Curious George",
      "record_uid": "..."
    }
    >>> entry.name  # before
    u'Chunky monkey'
    >>> party_form.update_all(serialized)
    >>> entry.name  # after update
    u'Curious George'

    JSON parsing also supports a "bundle" or wrapper object around a list of
    entries, where the wrapper contains metadata about the form itself, not
    its entries (currently, this is just the process_changes field, which
    is sourced from the JSON bundle/wrapper object field called 'notes').
    When wrapped, the list of entries is named 'entries' inside the wrapper.

    >>> data['name'] = u'Party monkey'
    >>> serialized = json.dumps({'notes'    : 'something changed',
    ...                          'entries'  : [data,]},
    ...                         indent=2)  # JSON array of one item...
    >>> entry.name  # before
    u'Curious George'
    >>> party_form.update_all(serialized)
    >>> entry.name  # after
    u'Party monkey'

    It should be noted that update_all() removes entries not in the data
    payload, and it preserves the order contained in the JSON entries.

    Object events
    -------------

    CRUD methods on a controlling object should have some means of extension,
    pluggable to code that should subscribe to CRUD (object lifecycle) events.
    We notify four distinct zope.lifecycleevent object event types:

    1. Object created (zope.lifecycleevent.interfaces.IObjectCreatedEvent)

    2. Object added to container:
        (zope.lifecycleevent.interfaces.IObjectAddedEvent).

    3. Object modified (zope.lifecycleevent.interfaces.IObjectModifiedEvent)

    4. Object removed (zope.lifecycleevent.interfaces.IObjectRemovedEvent)

    Note: the create() operation both creates and modifies: as such, both
    created and modified events are fired off, and since most creations also
    are followed by an add() to a container, you may have three events to
    subscribe to early in a new entry's lifecycle.

    First, some necessary imports of events and the @adapter decorator:

    >>> from zope.component import adapter
    >>> from zope.lifecycleevent import IObjectCreatedEvent
    >>> from zope.lifecycleevent import IObjectModifiedEvent
    >>> from zope.lifecycleevent import IObjectRemovedEvent
    >>> from zope.lifecycleevent import IObjectAddedEvent

    Let's define dummy handlers:

    >>> @adapter(IRecord, IObjectCreatedEvent)
    ... def handle_create(context, event):
    ...     print 'object created'
    ...
    >>> @adapter(IRecord, IObjectModifiedEvent)
    ... def handle_modify(context, event):
    ...     print 'object modified'
    ...
    >>> @adapter(IRecord, IObjectRemovedEvent)
    ... def handle_remove(context, event):
    ...     print 'object removed'
    ...
    >>> @adapter(IRecord, IObjectAddedEvent)
    ... def handle_add(context, event):
    ...     print 'object added'
    ...

    Next, let's configure zope.event to use zope.component event
    subscribers; most frameworks using zope.lifecycleevent already do
    this, but we will configure this explicitly for documentation
    and testing purposes, only if not already enabled:

    >>> import zope.event
    >>> from zope.component import getGlobalSiteManager
    >>> gsm = getGlobalSiteManager()

    Importing zope.component.event puts dispatch() in zope.event.subscribers:

    >>> from zope.component import event
    >>> assert event.dispatch in zope.event.subscribers
    
    Now, let's register the handlers:

    >>> for h in (handle_create, handle_modify, handle_remove, handle_add):
    ...     gsm.registerHandler(h)
    ...

    Usually, these handlers will be registered in the global site manager
    via ZCML and zope.configuration, but they are registered in Python
    above for documentation/testing purposes.

    We can watch these event handlers get fired when CRUD methods are called.

    Object creation, with and without data:

    >>> newentry = container.create()      # should print 'object created'
    object created
    >>> another_uid = str(uuid.uuid4())
    >>> newentry = container.create({'count':88})
    object modified
    object created

    Object addition:

    >>> container.add(newentry)
    object added
    >>>

    Object removal:

    >>> del(container[newentry.record_uid])  # via __delitem__()
    object removed

    Object update (existing object):

    >>> entry = container.values()[0]
    >>> entry = container.update({'record_uid' : entry.record_uid,
    ...                            'title'      : u'Me'})
    object modified

    Object modified (new object or not contained):

    >>> random_uid = str(uuid.uuid4())
    >>> entry = container.update({'record_uid' : random_uid,
    ...                            'title'      : u'Bananas'})
    object modified
    object created
    object added

    Event handlers for modification can know what fields are modified; let's
    create a more interesting modification handler that prints the names of
    changed fields.

    >>> from zope.lifecycleevent.interfaces import IAttributes
    >>> unregistered = gsm.unregisterHandler(handle_modify)
    >>> @adapter(IRecord, IObjectModifiedEvent)
    ... def handle_modify(context, event):
    ...     if event.descriptions:
    ...         attr_desc = [d for d in event.descriptions
    ...                         if (IAttributes.providedBy(d))]
    ...         if attr_desc:
    ...             field_names = attr_desc[0].attributes
    ...         print tuple(field_names)
    >>> gsm.registerHandler(handle_modify)

    >>> entry = container.values()[0]
    >>> entry = container.update({'record_uid' : entry.record_uid,
    ...                            'title'      : u'Hello'})
    ('title',)

    Finally, clean up and remove all the dummy handlers:
    >>> for h in (handle_create, handle_modify, handle_remove, handle_add):
    ...     success = gsm.unregisterHandler(h)
    ...

    """

    implements(IRecordContainer)

    # whitelist types of objects to copy on data update:

    # Simple scalar types whose values may be copied verbatim onto a
    # record by _populate_record() / _type_whitelist_validation().
    TYPE_WHITELIST = (
        int,
        long,
        str,
        unicode,
        bool,
        float,
        # NOTE(review): time.time is a builtin *function*, not a type, so
        # type(value) can never equal it -- was datetime.time intended?
        # Confirm against this module's imports.
        time.time,
        datetime,
        date,
        timedelta,
        decimal.Decimal,
    )

    # Sequence containers allowed as field values (members must in turn
    # be of TYPE_WHITELIST types).
    SEQUENCE_WHITELIST = (
        list,
        tuple,
        set,
        frozenset,
        PersistentList,
    )

    # Mapping containers allowed as field values (keys and values must
    # be of TYPE_WHITELIST types).
    MAPPING_WHITELIST = (
        dict,
        PersistentDict,
    )

    # Interface each contained value must provide; subclasses may narrow.
    RECORD_INTERFACE = IRecord

    # Default record factory; overridden per-instance in __init__().
    factory = Record

    def __init__(self, factory=Record, _impl=PersistentDict):
        """
        Initialize an empty container.

        :param factory: callable producing new record objects.
        :param _impl: mapping class used for UID -> record storage.
        """
        self.factory = factory
        self._entries = _impl()
        self._order = PersistentList()

    # IWriteContainer methods:

    def _update_size(self):
        """Refresh the cached entry count and flag a persistence change."""
        count = len(self._order)
        self._size = count
        self._p_changed = True

    def __setitem__(self, key, value):
        """
        Store value (which must provide self.RECORD_INTERFACE) under a
        string UUID key.  uuid.UUID and unicode keys are coerced to str;
        anything else must already be a 36-character string UUID.
        Appends the key to the order when new.
        """
        if isinstance(key, uuid.UUID) or isinstance(key, unicode):
            key = str(key)
        elif not (isinstance(key, str) and len(key) == 36):
            # fix: use % formatting -- passing (msg, key) as two arguments
            # made the exception carry an unformatted tuple, not a message
            raise KeyError('key does not appear to be string UUID: %s' % key)
        if not self.RECORD_INTERFACE.providedBy(value):
            raise ValueError('Record value must provide %s' %
                             (self.RECORD_INTERFACE.__identifier__))
        self._entries[key] = value
        if key not in self._order:
            self._order.append(key)
            self._update_size()

    def __delitem__(self, record):
        """
        Remove an entry, identified by record object, uuid.UUID, or a
        36-character string UUID.  Raises ValueError on a malformed key
        or when no such entry exists.  Notifies ObjectRemovedEvent with
        the removed record after deletion.
        """
        if self.RECORD_INTERFACE.providedBy(record):
            uid = str(record.record_uid)
        elif isinstance(record, uuid.UUID):
            uid = str(record)
        else:
            uid = record
        if not (isinstance(uid, str) and len(uid) == 36):
            raise ValueError('record neither record object nor UUID')
        if uid not in self._entries:
            raise ValueError('record not found contained within')
        if uid in self._order:
            self._order.remove(uid)
            self._update_size()
        if not self.RECORD_INTERFACE.providedBy(record):
            # keep a reference for the event notification below
            record = self._entries.get(uid)
        del self._entries[uid]
        notify(ObjectRemovedEvent(record, self, uid))

    # IRecordContainer and IOrdered re-ordering methods:

    def reorder(self, record, offset):
        """
        Move a record (object with record_uid, or a UUID) to position
        offset in self._order.  The offset is coerced to a non-negative
        integer; a UUID not present in the order raises ValueError.
        """
        target = abs(int(offset))
        if self.RECORD_INTERFACE.providedBy(record):
            uid = record.record_uid
        else:
            uid = record
        if not uid or uid not in self._order:
            raise ValueError('cannot find record to move for id %s' % uid)
        current = self._order.index(uid)
        self._order.insert(target, self._order.pop(current))

    def updateOrder(self, order):
        """
        Replace the key order wholesale; provides
        zope.container.interfaces.IOrdered.updateOrder.  The new order
        must be an exact permutation of the existing keys.
        """
        if len(order) != len(self._order):
            raise ValueError('invalid number of keys')
        unique = set(order)
        if len(unique) != len(order):
            raise ValueError('duplicate keys in order')
        if unique - set(self._order):
            raise ValueError('unknown key(s) provided in order')
        if not isinstance(order, PersistentList):
            order = PersistentList(order)
        self._order = order

    # IReadContainer interface methods:

    def get(self, uid, default=None):
        """
        Get object providing IRecord for given UUID uid or return None
        """
        if self.RECORD_INTERFACE.providedBy(uid):
            uid = uid.record_uid  # special case to support __contains__() impl
        v = self._entries.get(str(uid), default)
        if v and getattr(v, '_v_parent', None) is None:
            v._v_parent = self  # container marks item with itself as context
        return v

    def __contains__(self, record):
        """True when record -- IRecord object or UUID -- is stored here."""
        if not self.RECORD_INTERFACE.providedBy(record):
            return str(record) in self._entries
        return self.get(record, None) is not None

    def __len__(self):
        """Entry count; prefers the cached _size, else the order length."""
        cached = getattr(aq_base(self), '_size', None)
        if cached is None:
            return len(self._order)
        return cached

    def __getitem__(self, key):
        """Return record for UID key; raise KeyError when unknown."""
        found = self.get(key, None)
        if found is None:
            raise KeyError('unknown UID for record entry')
        return found

    def keys(self):
        """Return the record UUID keys as an ordered tuple."""
        return tuple(self._order)

    def values(self):
        """Return the records as a tuple, in key order."""
        return tuple(v for _, v in self.items())

    def items(self):
        """Return ordered (uid, record) pairs as a tuple."""
        return tuple((uid, self.get(uid)) for uid in self._order)

    def __iter__(self):
        """Iterate over record UUID keys in container order."""
        return iter(self._order)

    # IRecordContainer-specific CRUD methods:

    def _type_whitelist_validation(self, value):
        """
        Raise ValueError unless value is of a whitelisted simple type,
        or is a whitelisted sequence/mapping whose members are all of
        whitelisted simple types.
        """
        vtype = type(value)
        if vtype in self.MAPPING_WHITELIST:
            for k, v in value.items():
                # fix: compare member *types*; the previous code tested the
                # members themselves against the tuple of types, so every
                # mapping was rejected regardless of its contents
                if not (type(k) in self.TYPE_WHITELIST and
                        type(v) in self.TYPE_WHITELIST):
                    raise ValueError('Unsupported mapping key/value type')
        elif vtype in self.SEQUENCE_WHITELIST:
            for v in value:
                # fix: same value-vs-type confusion for sequence members
                if type(v) not in self.TYPE_WHITELIST:
                    raise ValueError('Unsupported sequence value type')
        else:
            if vtype not in self.TYPE_WHITELIST:
                raise ValueError('Unsupported data type')

    def _populate_record(self, record, data):
        """
        Given mapping of data, copy values to attributes on record.

        Keys starting with '_' are ignored; 'record_uid' is normalized
        to a string and set on the record; values failing the type
        whitelist are silently skipped.  Notifies ObjectModifiedEvent
        carrying the changed attribute names when anything changed.

        Subclasses may override to provide schema validation, selective
        copy of names, and normalization of values if/as necessary.
        """
        changelog = []
        for key, value in data.items():
            if key.startswith('_'):
                continue  # invalid key
            if key == 'record_uid':
                # fix: set the uid on the record being populated; the
                # previous code assigned it to self (the container)
                record.record_uid = str(value)
                continue
            try:
                self._type_whitelist_validation(value)
            except ValueError:
                continue  # skip problem name!
            # fix: compare against the record's current value, not the
            # container's attribute of the same name
            existing_value = getattr(record, key, None)
            if value != existing_value:
                changelog.append(key)
                setattr(record, key, value)
        if changelog:
            record._p_changed = True
            changelog = [
                Attributes(self.RECORD_INTERFACE, name) for name in changelog
            ]
            notify(ObjectModifiedEvent(record, *changelog))

    def create(self, data=None):
        """
        Alternative factory for an IRecord object; the result is NOT
        stored in the container.  When data is a mapping, its fields
        (including an optional 'record_uid') are copied onto the new
        record.  Notifies IObjectCreatedEvent (and, via population,
        IObjectModifiedEvent when fields were copied).
        """
        if data is None:
            data = {}
        # honor a caller-supplied UID, otherwise mint a random one
        uid = data.get('record_uid', str(uuid.uuid4()))
        record = self.factory(context=self, uid=uid)
        is_mapping = hasattr(data, 'get') and hasattr(data, 'items')
        if data and is_mapping:
            self._before_populate(record, data)
            self._populate_record(record, data)
        notify(ObjectCreatedEvent(record))
        return record

    def add(self, record):
        """
        Store record under its UUID, appending the UUID to the end of
        the order; an existing entry for the same UUID is overwritten in
        place with the order left alone.  Notifies IObjectAddedEvent.
        """
        uid = str(record.record_uid)
        if not uid:
            raise ValueError('record has empty UUID')
        self._entries[uid] = record
        if uid not in self._order:
            self._order.append(uid)
            self._update_size()
        notify(ObjectAddedEvent(record, self, uid))

    def _ad_hoc_fieldlist(self, record):
        """Return names of public attributes whose values pass the
        type whitelist (in dir() order)."""
        fieldnames = []
        for name in dir(record):
            if name.startswith('_'):
                continue
            try:
                self._type_whitelist_validation(getattr(record, name))
            except ValueError:
                continue  # ignore non-whitelisted value
            fieldnames.append(name)
        return fieldnames

    def _filtered_data(self, data):
        """
        Return a dict of whitelisted field name/value pairs from data,
        which may be a record object or a mapping.
        """
        fieldnames = self._ad_hoc_fieldlist(data)
        # consistency fix: use self.RECORD_INTERFACE (as every sibling
        # method does) instead of the hard-coded IRecord global, so
        # subclasses overriding RECORD_INTERFACE behave uniformly
        if self.RECORD_INTERFACE.providedBy(data):
            return dict([(k, getattr(data, k, None)) for k in fieldnames])
        return dict([(k, data.get(k, None)) for k in fieldnames])

    def _before_populate(self, record, data):
        # subclass hook: called just before data is copied onto record
        pass  # hook for subclasses

    def _before_update_notification(self, record, data):
        # subclass hook: called after populate/add, before the container
        # modification notification fires in update()
        pass  # hook for subclasses

    def notify_data_changed(self):
        """Notify that this container's 'items' collection was modified."""
        notify(ObjectModifiedEvent(self, Attributes(IRecordContainer,
                                                    'items')))

    def update(self, data, suppress_notify=False):
        """
        Update an existing entry -- or add a new one -- from data, which
        may be a mapping of field key/values or an object providing
        IRecord.  All provided field values are copied; specialized or
        schema-bound subclasses may restrict or normalize what is
        copied.

        Pre-condition:

          * data must carry a 'record_uid' field with a string UUID.

        Post-conditions:

          * New entries go through self.create() then self.add(), which
            notify IObjectCreatedEvent and IObjectAddedEvent (with this
            container as context).

          * Modifications notify IObjectModifiedEvent (change detection
            is implementation-specific).

          * The modified (or newly added) record is returned.
        """
        if self.RECORD_INTERFACE.providedBy(data):
            uid = data.record_uid
            data = self._filtered_data(data)
        else:
            uid = data.get('record_uid', None)
        if uid is None:
            raise ValueError('empty record UID on update')
        record = self.get(str(uid), None)
        if record is None:
            # unknown UID: create (notifies created/modified), then add
            record = self.create(data)
            self.add(record)  # notifies added event
        else:
            # known record: populate in place; notifies modified event
            self._before_populate(record, data)
            self._populate_record(record, data)
        self._before_update_notification(record, data)
        if not suppress_notify and getattr(record, '_p_changed', None):
            self.notify_data_changed()
        return record

    def _process_container_metadata(self, data):
        # subclass hook: consume wrapper-level metadata from a parsed
        # JSON payload; return True when the container itself changed
        return False  # hook for subclasses

    def update_all(self, data):
        """
        Given a sequence of data dictionaries -- or a JSON serialization
        of such a sequence, of a single entry dict, or of a wrapper
        object carrying an 'entries' list plus container metadata --
        update each item via update().  Entries not present in the
        payload are removed, and the payload's ordering replaces the
        container's order.  Raises ValueError on missing UID of any
        item/entry.
        """
        _modified = False
        if isinstance(data, basestring):
            _data = json.loads(data)
            if isinstance(_data, dict):
                # dict might be singular item, or wrapping object; a wrapping
                # object would have a list called 'entries'
                if 'entries' in _data and isinstance(_data['entries'], list):
                    _modified = self._process_container_metadata(_data)
                    # wrapper, get entries from within.
                    _data = _data['entries']
                else:
                    # singular record, not a wrapper
                    _data = [_data]  # wrap singular item update in list
            _keynorm = lambda o: dict([(str(k), v) for k, v in o.items()])
            data = [_keynorm(o) for o in _data]
        # fix: validate UID presence before touching o['record_uid'], so a
        # missing UID raises the documented ValueError rather than an
        # incidental KeyError from the comprehension below
        for entry_data in data:
            if 'record_uid' not in entry_data:
                raise ValueError('record missing UID')
        uids = [str(o['record_uid']) for o in data]
        existing_uids = set(self.keys())
        added_uids = set(uids) - existing_uids
        modified_uids = set(uids).intersection(existing_uids)
        for entry_data in data:
            record = self.update(entry_data, suppress_notify=True)
            if not _modified and getattr(record, '_p_changed', None):
                _modified = True
        remove_uids = existing_uids - set(uids)
        for deluid in remove_uids:
            del self[deluid]  # remove any previous entries not in the form
        self._order = PersistentList(uids)  # replace old with new uid order
        if added_uids or modified_uids:
            _modified = True
        if data and _modified:
            self.notify_data_changed()  # notify just once
Esempio n. 8
0
class Igrac(Persistent):
    """
    Persistent card-game player ("igrac", Croatian).

    Holds the player's hand (``karte``), pending-action flags
    (``zastavice``) and a back-reference to the game controller
    (``igra``).  Computer-controlled players (``jeLiRacunalo``) act
    automatically; humans act through the flag/argument interfaces.
    """

    def __init__(self, nadimak, jeLiRacunalo, igra):
        # nadimak = nickname; jeLiRacunalo = is-computer flag;
        # igra = game object this player registers with
        self.nadimak = nadimak
        self.karte = PersistentList()  # hand of cards
        self.igra = igra
        self.jeLiRacunalo = jeLiRacunalo
        self.zastavice = PersistentDict()  # action flags (0/1)

        # flags: uzmiKarte = take cards, provjeriZvanja = check
        # declarations, hocuLiZvati = will-I-bid, baciKartu = play a card
        self.zastavice.update({
            'uzmiKarte': 0,
            'provjeriZvanja': 0,
            'hocuLiZvati': 0,
            'baciKartu': 0
        })

        # join the game, then commit the new player to the ZODB
        # NOTE(review): committing inside __init__ couples the model to
        # transaction policy -- confirm this is intended
        self.igra.onSudjeluj(self)
        transaction.commit()

    def uzmiKarte(self):
        """Take dealt cards from the game and lay out their sprites."""
        # relies on module-level sprite registries
        global vidljiveKarteSprites
        global karteSpritesList
        self.karte.extend(self.igra.onDajKarte())
        # note: sortirajKarte returns a plain list from sorted(), so this
        # replaces the PersistentList hand with a non-persistent list
        self.karte = self.sortirajKarte(self.karte)
        for i in range(len(self.karte)):
            # find the sprite whose card image matches this card
            kartaSprite = next((x for x in karteSpritesList
                                if x.karta.slika == self.karte[i].slika), None)
            # center the hand horizontally, 100px per card, y fixed at 566
            kartaSprite.pozicioniraj(
                (1000 - (100 * len(self.karte))) / 2 + 100 * i, 566)
            kartaSprite.layer = i
            kartaSprite.prikazi()
            vidljiveKarteSprites.add(kartaSprite)

    def sortirajKarte(self, karte):
        """Return cards sorted by suit (boja) then rank (poredak)."""
        return sorted(karte,
                      key=lambda karta: (karta.boja, karta.poredak),
                      reverse=False)

    def provjeriZvanja(self):
        """Hand the full hand to the game for declaration checking."""
        self.igra.onPrijaviZvanje(self, self.karte)

    def hocuLiZvati(self, moramLiZvati):
        """
        Decide whether to call trump.  Computer players sum per-suit
        trump values and call the strongest suit when its strength
        exceeds 30 or when forced (moramLiZvati); otherwise they pass.
        NOTE(review): returns None (not True) when trump is called or
        when the player is human -- confirm callers only test for False.
        """
        if self.jeLiRacunalo == True:
            # accumulated trump strength per suit
            jacinaAduta = {'Herc': 0, 'Bundeva': 0, 'Zelena': 0, 'Zir': 0}
            for karta in self.karte:
                if karta.boja == 'Herc':
                    jacinaAduta['Herc'] += karta.vrijednostAduta
                elif karta.boja == 'Bundeva':
                    jacinaAduta['Bundeva'] += karta.vrijednostAduta
                elif karta.boja == 'Zelena':
                    jacinaAduta['Zelena'] += karta.vrijednostAduta
                elif karta.boja == 'Zir':
                    jacinaAduta['Zir'] += karta.vrijednostAduta

            najjacaBoja = max(jacinaAduta, key=jacinaAduta.get)

            if jacinaAduta[najjacaBoja] > 30 or moramLiZvati:
                print self.nadimak + ": zovem " + najjacaBoja
                self.igra.onOdaberiAdut(najjacaBoja)
            else:
                print self.nadimak + ": dalje!"
                self.igra.onOdaberiAdut(False)
                return False

    def baciKartu(self, odabranaKarta=None):
        """
        Play a card.  Computer players play each rule-legal card found;
        humans play odabranaKarta when legal, returning True/False.
        """
        if (self.jeLiRacunalo == True):
            # NOTE(review): this removes from self.karte while iterating
            # it, which skips the element after each removal -- confirm
            # whether playing only the first legal card was intended
            for karta in self.karte:
                if self.igra.onJeLiPoPravilima(self.karte, karta) == True:
                    time.sleep(.01)
                    print self.nadimak + ": ", karta
                    self.karte.remove(karta)
                    self.igra.onBaciKartu(karta)
                else:
                    continue
        else:
            if self.igra.onJeLiPoPravilima(self.karte, odabranaKarta) == True:
                self.zastavice["baciKartu"] = 0
                self.igra.onBaciKartu(
                    self.karte.pop(self.karte.index(odabranaKarta)))
                return True
            else:
                return False
Esempio n. 9
0
    def checkTheWorld(self):
        """
        Exercise PersistentList against the full list protocol:
        constructors, repr, comparison, item/slice get/set/del,
        containment, + and * operators, and the mutating methods
        (append, insert, pop, remove, count, index, reverse, sort,
        extend).  Expected values come from plain-list fixtures l0, l1,
        l2 defined outside this method (apparently [], [0] and [0, 1],
        inferred from the assertions below).
        """
        # Test constructors
        u = PersistentList()
        u0 = PersistentList(l0)
        u1 = PersistentList(l1)
        u2 = PersistentList(l2)

        uu = PersistentList(u)
        uu0 = PersistentList(u0)
        uu1 = PersistentList(u1)
        uu2 = PersistentList(u2)

        v = PersistentList(tuple(u))
        # a minimal sequence-like class: construction from any object
        # supporting __len__/__getitem__ must work
        class OtherList(object):
            def __init__(self, initlist):
                self.__data = initlist
            def __len__(self):
                return len(self.__data)
            def __getitem__(self, i):
                return self.__data[i]
        v0 = PersistentList(OtherList(u0))
        vv = PersistentList("this is also a sequence")

        # Test __repr__
        eq = self.assertEqual

        eq(str(u0), str(l0), "str(u0) == str(l0)")
        eq(repr(u1), repr(l1), "repr(u1) == repr(l1)")
        eq(repr(u2), repr(l2), "repr(u2) == repr(l2)")

        # Test __cmp__ and __len__

        # Py3: No cmp() or __cmp__ anymore.
        if PY2:
            def mycmp(a, b):
                r = cmp(a, b)
                if r < 0: return -1
                if r > 0: return 1
                return r

            # NOTE(review): 'all' shadows the builtin within this scope
            all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2]
            for a in all:
                for b in all:
                    eq(mycmp(a, b), mycmp(len(a), len(b)),
                          "mycmp(a, b) == mycmp(len(a), len(b))")

        # Test __getitem__

        for i in range(len(u2)):
            eq(u2[i], i, "u2[i] == i")

        # Test __setitem__

        uu2[0] = 0
        uu2[1] = 100
        try:
            uu2[2] = 200
        except IndexError:
            pass
        else:
            self.fail("uu2[2] shouldn't be assignable")

        # Test __delitem__

        del uu2[1]
        del uu2[0]
        try:
            del uu2[0]
        except IndexError:
            pass
        else:
            self.fail("uu2[0] shouldn't be deletable")

        # Test __getslice__

        for i in range(-3, 4):
            eq(u2[:i], l2[:i], "u2[:i] == l2[:i]")
            eq(u2[i:], l2[i:], "u2[i:] == l2[i:]")
            for j in range(-3, 4):
                eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]")

        # Test __setslice__

        for i in range(-3, 4):
            u2[:i] = l2[:i]
            eq(u2, l2, "u2 == l2")
            u2[i:] = l2[i:]
            eq(u2, l2, "u2 == l2")
            for j in range(-3, 4):
                u2[i:j] = l2[i:j]
                eq(u2, l2, "u2 == l2")

        uu2 = u2[:]
        uu2[:0] = [-2, -1]
        eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]")
        uu2[0:] = []
        eq(uu2, [], "uu2 == []")

        # Test __contains__
        for i in u2:
            self.assertTrue(i in u2, "i in u2")
        for i in min(u2)-1, max(u2)+1:
            self.assertTrue(i not in u2, "i not in u2")

        # Test __delslice__

        uu2 = u2[:]
        del uu2[1:2]
        del uu2[0:1]
        eq(uu2, [], "uu2 == []")

        uu2 = u2[:]
        del uu2[1:]
        del uu2[:1]
        eq(uu2, [], "uu2 == []")

        # Test __add__, __radd__, __mul__ and __rmul__

        #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1")
        self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2")
        #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]")
        self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2")
        self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2")
        self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2")

        # Test append

        u = u1[:]
        u.append(1)
        eq(u, u2, "u == u2")

        # Test insert

        u = u2[:]
        u.insert(0, -1)
        eq(u, [-1, 0, 1], "u == [-1, 0, 1]")

        # Test pop

        u = PersistentList([0, -1, 1])
        u.pop()
        eq(u, [0, -1], "u == [0, -1]")
        u.pop(0)
        eq(u, [-1], "u == [-1]")

        # Test remove

        u = u2[:]
        u.remove(1)
        eq(u, u1, "u == u1")

        # Test count
        u = u2*3
        eq(u.count(0), 3, "u.count(0) == 3")
        eq(u.count(1), 3, "u.count(1) == 3")
        eq(u.count(2), 0, "u.count(2) == 0")


        # Test index

        eq(u2.index(0), 0, "u2.index(0) == 0")
        eq(u2.index(1), 1, "u2.index(1) == 1")
        try:
            u2.index(2)
        except ValueError:
            pass
        else:
            self.fail("expected ValueError")

        # Test reverse

        u = u2[:]
        u.reverse()
        eq(u, [1, 0], "u == [1, 0]")
        u.reverse()
        eq(u, u2, "u == u2")

        # Test sort

        u = PersistentList([1, 0])
        u.sort()
        eq(u, u2, "u == u2")

        # Test extend

        u = u1[:]
        u.extend(u2)
        eq(u, u1 + u2, "u == u1 + u2")
Esempio n. 10
0
class HasComments:
    """Mixin class providing simple container of comments."""

    __persistenceVersion = 1

    def __init__(self):
        # both lists are created lazily (None until the first comment) so
        # an empty mixin doesn't hog extra ZODB objects
        self.__comments = None
        self.__comment_flags = None

    def _upgradeToVersion1(self):
        # make comments attribute private and drop the empty
        # PersistentList so every comment isn't hogging an extra ZODB
        # object
        if self.comments:
            self.__comments = PersistentList(self.comments)
        else:
            self.__comments = None

        del self.comments

        # rebuild the flags list, a cache of each comment's deleted flag
        comments = self.get_all_comments()
        if comments:
            self.__comment_flags = PersistentList(
                [comment.is_deleted(use_attr=True) for comment in comments])
        else:
            self.__comment_flags = None

    def add_comment(self, comment):
        """Append comment with a cleared (not-deleted) flag."""
        if not self.__comments:
            self.__comments = PersistentList()
        if not self.__comment_flags:
            self.__comment_flags = PersistentList()
        self.__comments.append(comment)
        self.__comment_flags.append(0)

    def comment_index(self, item):
        """Index of item among all comments; ValueError when absent."""
        if not self.__comments:
            # behave as list.index would on an empty list
            raise ValueError
        return self.__comments.index(item)

    def get_comment(self, comment_index):
        """Comment at index, or None when absent or out of range."""
        if not self.__comments:
            return None
        try:
            return self.__comments[comment_index]
        except IndexError:
            return None

    def get_comments(self):
        """All comments not marked deleted (empty list when none)."""
        existing = self.__comments or []
        return [c for c in existing if not c.is_deleted()]

    def num_comments(self):
        """Return number of undeleted comments, per the cached flags."""
        flags = self.__comment_flags
        if not flags:
            return 0
        return sum(1 for f in flags if not f)

    def num_all_comments(self):
        """Total comment count, deleted ones included."""
        return len(self.get_all_comments())

    def get_all_comments(self):
        """Shallow copy of the full comment list (empty list when none)."""
        if not self.__comments:
            return []
        return self.__comments[:]

    def get_comment_flags(self, item):
        """Cached deleted-flag for item; item must already be present."""
        assert self.__comment_flags is not None
        return self.__comment_flags[self.comment_index(item)]

    def set_comment_flags(self, item, val):
        """Set the cached deleted-flag for item."""
        assert self.__comment_flags is not None
        self.__comment_flags[self.comment_index(item)] = val

    def comments_by(self, author):
        """Return comments by author if any.

        Comments must have `author` attribute (compared by identity).
        """
        if not self.__comments:
            return []
        return [c for c in self.__comments if c.author is author]

    def num_old_new_comments(self, since_when):
        """Return as a tuple the number of comments old and new,
        split at since_when, counting deleted comments as well."""
        old = new = 0
        for c in self.get_all_comments():
            if c.date <= since_when:
                old += 1
            else:
                new += 1
        return (old, new)

    def is_duplicate(self, comment):
        """True when comment matches the most recent comment's author,
        title, summary, main text and deleted state."""
        if not (self.__comments and len(self.__comments) > 0):
            return False
        latest = self.__comments[-1]
        if (latest.author == comment.author and
                latest.title == comment.title and
                latest.get_summary() == comment.get_summary() and
                latest.get_main() == comment.get_main() and
                latest.is_deleted() == comment.is_deleted()):
            return True
        return False
Esempio n. 11
0
class Ordering(Persistent):
    """ Store information about the ordering of items within a folder.

    Names are kept in a PersistentList (`_items`); the mutators keep it
    consistent with the containing folder.
    """
    implements(IOrdering)

    def __init__(self):
        Persistent.__init__(self)
        self._items = PersistentList()

    def sync(self, entries):
        """Reconcile the ordering with *entries*, the folder's names.

        Names present here but missing from the folder are dropped;
        names in the folder but not yet ordered are appended at the end.
        """
        # BUG FIX: iterate over a snapshot.  Removing from a list while
        # iterating it skips the element after each removal, so stale
        # names could survive the cleanup.
        for local_name in list(self._items):
            if local_name not in entries:
                # Item is in ordering but not in context, remove from
                # ordering.
                self._items.remove(local_name)

        for entry_name in entries:
            if entry_name not in self._items:
                # Item is in folder but not in ordering, append to
                # end.
                self._items.append(entry_name)

    def moveUp(self, name):
        """Move the item with __name__ == name up a position.  If at
        the beginning, move to last position."""
        position = self._items.index(name)
        del self._items[position]
        if position == 0:
            # Wrap around: the first item becomes the last.
            self._items.append(name)
        else:
            self._items.insert(position - 1, name)

    def moveDown(self, name):
        """Move the item with __name__ == name down a position.  If at
        the end, move to the first position."""
        position = self._items.index(name)
        list_length = len(self._items)
        del self._items[position]
        if position == (list_length - 1):
            # Wrap around: the last item becomes the first.  (The old
            # comment wrongly said "end".)
            self._items.insert(0, name)
        else:
            self._items.insert(position + 1, name)

    def add(self, name):
        """When a new item is added to a folder, put it at the end."""
        if name not in self._items:
            self._items.append(name)

    def remove(self, name):
        """When an existing item is removed from the folder, remove it
        from the ordering.  Sure would be nice to use events to do this
        for us."""
        if name in self._items:
            self._items.remove(name)

    def items(self):
        # NOTE: returns the live PersistentList, not a copy; callers may
        # rely on seeing subsequent mutations.
        return self._items

    def previous_name(self, current_name):
        """Return the name before *current_name*, or None when
        *current_name* is first."""
        position = self._items.index(current_name)
        if position == 0:
            return None
        return self._items[position - 1]

    def next_name(self, current_name):
        """Return the name after *current_name*, or None when
        *current_name* is last."""
        position = self._items.index(current_name)
        if position == (len(self._items) - 1):
            return None
        return self._items[position + 1]
class PersistentOrderedDict(persistent.Persistent):
    '''
    This class implements the same interface as the `collections.OrderedDict`
    class from the standard library, but uses `persistent` data types for ZODB
    support.
    '''
    def __init__(self, items=None):
        # key_index records insertion order; data holds the mapping.
        self.key_index = PersistentList()
        self.data = PersistentMapping()
        if items:
            for k, v in items:
                self[k] = v

    def keys(self):
        """Return a copy of the keys, in insertion order."""
        return self.key_index[:]

    def __setitem__(self, k, v):
        # New keys go to the end; overwriting keeps the original slot.
        if k not in self.data:
            self.key_index.append(k)
        self.data[k] = v

    def items(self):
        """Return a list of (key, value) pairs in insertion order."""
        return [(k, v) for k, v in self.iteritems()]

    def iteritems(self):
        """Yield (key, value) pairs in insertion order."""
        for k in self.key_index:
            yield k, self.data[k]

    def values(self):
        """Return the values in insertion order."""
        return [v for k, v in self.iteritems()]

    def get(self, key, default=None):
        # Accept a fallback like dict.get / OrderedDict.get; the old
        # signature always forced the default to None.  Backward
        # compatible: one-argument calls behave exactly as before.
        return self.data.get(key, default)

    def __delitem__(self, key):
        del self.data[key]
        # Keys are unique in key_index, so remove() drops exactly the
        # right entry.
        self.key_index.remove(key)

    def __getitem__(self, key):
        return self.data[key]

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        # Iterate keys in insertion order, matching OrderedDict; the old
        # class fell back to the legacy __getitem__ protocol, which
        # misbehaved for non-integer keys.
        return iter(self.key_index)

    def setdefault(self, key, default_value):
        """Return self[key], first setting it to *default_value* if absent."""
        if key not in self:
            self[key] = default_value
        return self[key]

    def move_to_end(self, key, last=True):
        """Move *key* to the end (or, with last=False, to the front).

        Only the key ordering changes; the stored values are untouched.
        The old implementation deleted and re-inserted every entry,
        costing O(n) ZODB writes for the same result.
        """
        assert key in self
        self.key_index.remove(key)
        if last:
            self.key_index.append(key)
        else:
            self.key_index.insert(0, key)

    def __contains__(self, key):
        return key in self.data
Esempio n. 13
0
class BodyPartGraph(Persistent):
    """A collection of BodyParts joined by edges.

    A BPG exists in two forms: the *genotype* (may contain cycles;
    carries input_map entries) and an *unrolled* phenotype copy
    (acyclic; self.unrolled == 1; carries concrete motor/sensor
    connections).  Several methods below behave differently depending
    on which form ``self`` is.
    """
    def __init__(self, network_args=None):
        """if network_args is None create empty BPG else create a
        random BPG."""

        log.debug('BodyPartGraph.__init__')
        self.bodyparts = PersistentList()
        self.unrolled = 0  # set to 1 on unrolled (phenotype) copies
        if not network_args:
            # this is used for unrolled bodypart copies
            self.root = None
        else:
            self.randomInit(network_args)

    def destroy(self):
        # Tear down every part; bp.destroy() presumably releases
        # simulation resources -- TODO confirm against BodyPart.
        for bp in self.bodyparts:
            bp.destroy()

    def step(self):
        # One tick: advance every network first, then every motor, so
        # motors act on this tick's network outputs.
        for bp in self.bodyparts:
            bp.network.step()
        for bp in self.bodyparts:
            if hasattr(bp, 'motor'):
                bp.motor.step()

    def randomInit(self, network_args):
        """Build a random connected graph, retrying until the unrolled
        size lands in [MIN_UNROLLED_BODYPARTS, MAX_UNROLLED_BODYPARTS]."""
        while 1:
            # create graph randomly
            del self.bodyparts[:]
            num_bodyparts = random.randint(2, BPG_MAX_NODES)
            log.debug('Creating %d random BodyParts' % (num_bodyparts))
            for _ in range(num_bodyparts):
                bp = BodyPart(network_args)
                self.bodyparts.append(bp)
            # randomly select the root node
            self.root = random.choice(self.bodyparts)
            self.root.isRoot = 1
            root_index = self.bodyparts.index(self.root)
            # possible n^2 connections
            num_connects = random.randint(1, BPG_MAX_EDGES)
            log.debug('creating upto %d random connections', num_connects)
            # Now select randomly and use to create actual connect
            # inset = nodes reachable from root so far; outset = the rest.
            inset = [root_index]
            outset = range(0, root_index) + range(root_index + 1,
                                                  num_bodyparts)
            for _ in range(num_connects):
                # select from inset
                src_i = random.randint(0, len(inset) - 1)
                if not outset:
                    break
                inoutset = inset + outset
                dst_i = random.randint(0, len(inoutset) - 1)
                src = self.bodyparts[inset[src_i]]
                bodyparts_dst_i = inoutset[dst_i]
                dst = self.bodyparts[bodyparts_dst_i]
                src.connectTo(dst)
                # there is no check for an existing edge, so we can get multiple edges between src and dst
                if not bodyparts_dst_i in inset:
                    inset.append(bodyparts_dst_i)
                if bodyparts_dst_i in outset:
                    outset.remove(bodyparts_dst_i)
            u = self.unroll(1)
            if MIN_UNROLLED_BODYPARTS <= len(
                    u.bodyparts) <= MAX_UNROLLED_BODYPARTS:
                self.connectInputNodes()
                self.sanityCheck()
                break

    def getNeighbours(self, bp):
        """Calculate the set of valid neighbour bodyparts of bp

        A bodypart is a neighbour of bp if it is a parent or child in the
        bodypartgraph, or if it is bp itself."""
        assert bp in self.bodyparts
        # find possible sources for connection in this phenotype
        valid_bp_neighbours = [bp]
        # .. all children
        valid_bp_neighbours += [e.child for e in bp.edges]
        # .. parent
        valid_bp_neighbours += [
            p for p in self.bodyparts for e in p.edges if e.child == bp
        ]
        for neighbour in valid_bp_neighbours:
            assert neighbour in self.bodyparts
        log.debug('valid bp neighbours = %s', valid_bp_neighbours)
        return valid_bp_neighbours

    def connectInputNodes(self, sanitycheck=1):
        """Connect all sensory input nodes up to something.

        If the bpg is already unrolled, then it is a phenotype and the results
        won't be backannotated to the genotype input_map. If anything is left
        unconnected, an assert error will be thrown.

        If the bpg isn't already unrolled, then it will be, and any missing
        connections will be randomly selected and backannotated into the
        genotype input_map, so that later calls to unroll and connect will be
        able to succeed in connecting every input node up.
        """
        log.debug('BodyPartGraph.connectInputNodes(self=%s)', self)
        if self.unrolled:
            log.debug('self.unrolled=1')
            backannotate = 0
            p_bpg = self
        else:
            log.debug('self.unrolled=0')
            backannotate = 1
            p_bpg = self.unroll()
        log.debug('p_bpg=%s (bodyparts=%s)' % (p_bpg, p_bpg.bodyparts))
        # find all unconnected nodes
        un = set([(p_dst_bp, p_dst_signal) for p_dst_bp in p_bpg.bodyparts
                  for p_dst_signal in p_dst_bp.network.inputs
                  if not p_dst_signal.externalInputs])
        # and unconnected motors
        un = un.union(
            set([(p_dst_bp, 'MOTOR_%d' % i) for p_dst_bp in p_bpg.bodyparts
                 for i in 0, 1, 2 if not p_dst_bp.motor_input[i]]))

        for (p_dst_bp, p_dst_signal) in un:
            log.debug('UNCONNECTED bp %s signal %s', p_dst_bp, p_dst_signal)
            # find corresponding genotype of this node/motor
            g_bp = p_dst_bp.genotype
            if isinstance(p_dst_signal, node.Node):
                # Map phenotype node -> genotype node by position; relies
                # on the networks having identical layout.
                g_dst_signal = g_bp.network[p_dst_bp.network.index(
                    p_dst_signal)]
                assert g_dst_signal in g_bp.network.inputs
            else:
                g_dst_signal = p_dst_signal
            # is there an entry in g_bp.input_map for the target node/motor?
            if not g_bp.input_map.has_key(g_dst_signal):
                g_bp.input_map[g_dst_signal] = PersistentList()
            # are there matching maps for this phenotype topology?
            p_neighbours = p_bpg.getNeighbours(p_dst_bp)
            # find all neighbour bps with valid src bp,signal for this dst in input_map
            matches = [(g_src_bp, g_src_signal, p_src_bp, weight)
                       for (g_src_bp, g_src_signal,
                            weight) in g_bp.input_map[g_dst_signal]
                       for p_src_bp in p_neighbours
                       if p_src_bp.genotype is g_src_bp]

            log.debug('input_map matches = %s', matches)
            p_source = None
            for (g_src_bp, g_src_signal, p_src_bp, weight) in matches:
                log.debug(
                    'using prestored map g_src_bp=%s g_src_signal=%s weight=%f',
                    g_src_bp, g_src_signal, weight)
                # convert genotype src signal to phenotype value
                if type(g_src_signal) is str:
                    # Named sensor signal (e.g. 'CONTACT'): usable as-is.
                    p_source = (p_src_bp, g_src_signal, weight)
                    break
                else:
                    # find phenotype src node
                    g_src_index = g_src_bp.network.index(g_src_signal)
                    p_src_node = p_src_bp.network[g_src_index]
                    if isinstance(p_dst_signal, node.Node) and isinstance(
                            p_src_node, node.Node) and p_src_bp == p_dst_bp:
                        continue
                    # assert not two nodes in same bp network
                    assert not (isinstance(p_dst_signal, node.Node)
                                and isinstance(p_src_node, node.Node)) or (
                                    p_src_bp != p_dst_bp)
                    # don't allow an external_input if the connection
                    # already exists internally to the network
                    if not isinstance(
                            p_dst_signal, node.Node
                    ) or p_src_node not in p_dst_signal.inputs:
                        # set source to a phenotype (bp,s)
                        assert p_src_node in p_src_bp.network
                        p_source = (p_src_bp, p_src_node, weight)
                        break
                    log.debug('rejected map - nodes already connected')

            if not p_source:
                # no entry in input_map for this node/motor
                # raise error if we aren't connecting up a genotype bpg
                assert backannotate
                # pick a random (bp, signal) from p_bp and backannotate into g_bp.input_map
                p_src_bp = random.choice(p_neighbours)
                # no direct connects between sensors and motors
                posSrcs = []
                if not isinstance(p_dst_signal, str):
                    posSrcs = ['CONTACT', 'JOINT_0', 'JOINT_1', 'JOINT_2']
                # disallow connects from outnode to innode of same network
                if type(p_dst_signal) == str or p_src_bp != p_dst_bp:
                    posSrcs += p_src_bp.network.outputs
                if isinstance(p_dst_signal, node.Node):
                    for x in posSrcs:
                        assert x not in p_dst_signal.inputs
                    # remove any possible srcs that node is already connected to
                    posSrcs = [
                        x for x in posSrcs if x not in p_dst_signal.inputs
                    ]
                log.debug('possible connects %s <- %s', p_dst_signal, posSrcs)
                p_src_signal = random.choice(posSrcs)
                if isinstance(p_dst_signal, node.Node):
                    assert p_src_signal not in p_dst_signal.inputs
                if isinstance(p_src_signal, node.Node):
                    assert p_src_signal in p_src_bp.network
                weight = random.uniform(-7, 7)
                p_source = (p_src_bp, p_src_signal, weight)

                # find genotype of the chosen phenotype (bp,s)
                g_src_bp = p_src_bp.genotype
                # NOTE(review): a second independent weight is drawn here,
                # so the genotype map records a different weight from the
                # phenotype connection made above -- verify intent.
                weight = random.uniform(-7, 7)
                if isinstance(p_src_signal, node.Node):
                    # phenotype output node -> genotype output node
                    # (depends on offsets being the same)
                    g_src_signal = g_src_bp.network[p_src_bp.network.index(
                        p_src_signal)]
                    assert g_src_signal in g_src_bp.network
                    genosource = (g_src_bp, g_src_signal, weight)
                else:
                    genosource = (g_src_bp, p_src_signal, weight)

                log.debug('entering %s -> %s into bp.input_map', genosource,
                          g_dst_signal)
                # add to genotype.input_map our backannotated source
                assert (g_dst_signal, genosource) not in g_bp.input_map.items()
                g_bp.input_map[g_dst_signal].append(genosource)
                assert g_bp in [pbp.genotype for pbp in p_bpg.bodyparts]

            # add to signal target.
            if isinstance(p_dst_signal, node.Node):
                (p_src_bp, p_src_signal, weight) = p_source
                if isinstance(p_src_signal, node.Node):
                    assert p_src_signal in p_src_bp.network
                p_dst_signal.addExternalInput(p_src_bp, p_src_signal, weight)
            elif p_dst_signal[:6] == 'MOTOR_':
                # Decode the motor index from the 'MOTOR_<i>' string.
                i = ord(p_dst_signal[6]) - ord('0')
                assert not p_dst_bp.motor_input[i]
                (sbp, ssig, weight) = p_source
                log.debug('p_bp.motor_input[%d]=(%s,%s)' % (i, sbp, ssig))
                assert sbp in p_bpg.bodyparts
                p_dst_bp.motor_input[i] = p_source
            else:
                assert 0

        log.debug('/connectInputNodes, calling sanityCheck')
        if sanitycheck:
            p_bpg.sanityCheck()

        log.debug('/BodyPartGraph.connectInputNodes')

    def getInputs(self, bp):
        """Return a list of all the external inputs to bodypart bp.

        Returns: [ (targetneuron, (srcbp, signal, weight), ... ]"""

        if self.unrolled:
            # Phenotype: read the concrete externalInputs and motor wiring.
            s0 = [(neuron, externalInput) for neuron in bp.network.inputs
                  for externalInput in neuron.externalInputs]
            sources = []
            for (n, e) in s0:
                w = None
                if isinstance(n, node.WeightNode):
                    w = n.weights[e]
                (b, s) = e
                sources += [(n, (b, s, w))]
            # Joint type determines which motor channels exist.
            if bp.joint == 'hinge':
                sources += [('MOTOR_2', bp.motor_input[2])]
            elif bp.joint == 'universal':
                sources += [('MOTOR_0', bp.motor_input[0]),
                            ('MOTOR_1', bp.motor_input[1])]
            elif bp.joint == 'ball':
                sources += [('MOTOR_0', bp.motor_input[0]),
                            ('MOTOR_1', bp.motor_input[1]),
                            ('MOTOR_2', bp.motor_input[2])]
        else:
            # Genotype: read from the input_map instead.
            sources = [(neuron, src) for neuron in bp.input_map
                       for src in bp.input_map[neuron]]
            # src is (bp,sig,wei)
            # remove invalid motor connections
            # (assumes bp.joint is one of the three kinds below -- an
            # unknown joint would leave `invalid` undefined)
            if bp.joint == 'hinge':
                invalid = ['MOTOR_0', 'MOTOR_1']
            elif bp.joint == 'universal':
                invalid = ['MOTOR_2']
            elif bp.joint == 'ball':
                invalid = []
            sources = [(n, s) for (n, s) in sources if n not in invalid]
        return sources

    def unroll(self, skipNetwork=0):
        """Returns new BPG, of possibly 0 size.

        The BPG will be unrolled. Each path through the network will
        be traced, and a new cloned body part is made for each
        original. The connectivity of the copy will be the same as the
        original, except the copy will respect upper limits on the
        number of instances of any given body part in a single path,
        and final copy instances of a part will be connected to 'final
        edge' children. No loops are left in the new BPG."""
        log.debug('BodyPartGraph.unroll')
        # we need a count of every bp to make sure we don't loop too many times
        # (_v_ prefix: volatile, not persisted by ZODB)
        for b in self.bodyparts:
            b._v_instance_count = 0
        for b in self.bodyparts:
            assert b._v_instance_count == 0
        bpg = unroll_bodypart(self.root, skipNetwork)
        bpg.unrolled = 1
        log.debug('/BodyPartGraph.unroll (bpg size %d -> size %d)',
                  len(self.bodyparts), len(bpg.bodyparts))
        return bpg

    def sanityCheck(self):
        "See if anything is wrong with this BodyPartGraph"
        log.debug('BodyPartGraph.sanityCheck')
        # check everything we can reach from the root is in our bodypart list
        assert self.root in self.bodyparts
        bps = [self.root]
        assert len([x for x in self.bodyparts if x.isRoot == 1]) == 1
        # NOTE(review): `reachable` aliases `bps`, so every bp is always
        # "already found" and the else branch below never runs -- the
        # reachability walk is effectively a no-op.  Probably meant
        # `reachable = []` (or a copy); verify intent before fixing.
        reachable = bps
        while bps:
            bp = bps[0]
            if bp in reachable:
                # already found
                del bps[0]
            else:
                reachable.append(bp)
                assert self.bodyparts.count(bp) == 1
                #assert bp._v_instance_count >= 0
                for e in bp.edges:
                    assert self.bodyparts.count(e.child) == 1
                    if e.child not in reachable:
                        bps.append(e.child)

        # check every target child is in our bodyparts list
        for i in range(len(self.bodyparts)):
            bp = self.bodyparts[i]
            for e in bp.edges:
                assert self.bodyparts.count(e.child) == 1

        # make sure that everything is connected
        if self.unrolled:
            phen_bpg = self
        else:
            phen_bpg = self.unroll()
        phen_bpg.connectInputNodes(sanitycheck=0)
        # all external inputs should have a single connection, otherwise it
        # should be None
        for bp in phen_bpg.bodyparts:
            for n in bp.network:
                if n in bp.network.inputs:
                    assert n.externalInputs
                    for (sbp, src) in n.externalInputs:
                        assert sbp in phen_bpg.bodyparts
                        if isinstance(src, node.Node):
                            assert src in sbp.network
                            assert src in sbp.network.outputs
                            assert bp != sbp  # no inter-network connections
            # check motor connections
            for i in 0, 1, 2:
                assert bp.motor_input[i]
                (sbp, src, weight) = bp.motor_input[i]
                assert sbp in phen_bpg.bodyparts
                if isinstance(src, node.Node):
                    assert src in sbp.network.outputs
        if not self.unrolled:
            # assert that *genotype* has no motor_inputs
            for bp in self.bodyparts:
                for i in 0, 1, 2:
                    assert not bp.motor_input[i]
        for bp in self.bodyparts:
            if self.unrolled:
                # we only use input maps for the genotype, since phenotype BPs
                # link back to their genotype BPs anyway
                assert not bp.input_map
            else:
                # Make sure all entries in input_map are valid bodyparts and neurons
                for (tsignal, srclist) in bp.input_map.items():
                    assert tsignal in bp.network.inputs or tsignal[:5] == 'MOTOR'
                    for (sbp, ssignal, w) in srclist:
                        assert sbp in self.bodyparts
                        if isinstance(ssignal, node.Node):
                            assert ssignal in sbp.network
                        else:
                            assert ssignal in [
                                'JOINT_0', 'JOINT_1', 'JOINT_2', 'CONTACT'
                            ]
                        assert isinstance(w, float)
        # check src and dst nodes for externalInputs are in respective bp.network
        for bp in phen_bpg.bodyparts:
            sources = phen_bpg.getInputs(bp)
            for (tsignal, (sbp, signal, w)) in sources:
                # index() raises ValueError if the part/node is missing,
                # so these lookups act as checks even though the results
                # are unused.
                sbp_i = phen_bpg.bodyparts.index(sbp)
                tbp_i = phen_bpg.bodyparts.index(bp)
                if isinstance(tsignal, node.Node):
                    bp.network.index(tsignal)
                if isinstance(signal, node.Node):
                    assert signal in sbp.network

    def fixup(self):
        """Fix any problems with this BodyPartGraph (ie. invalid connections,
        bad root, etc.) This is called on rolled bpgs after mutation, and
        unrolled after modification to fit simulation constraints (eg.
        MAX_UNROLLED_BODYPARTS).  """
        # remove edges that point to invalid children
        for bp in self.bodyparts:
            edges_to_remove = []
            for e in bp.edges:
                if e.child not in self.bodyparts:
                    #bp.edges.remove(e)
                    edges_to_remove.append(e)
            for e in edges_to_remove:
                bp.edges.remove(e)
        # make sure root exists
        if self.root not in self.bodyparts or len(
            [x for x in self.bodyparts if x.isRoot == 1]) != 1:
            # randomly select the root node
            for b in self.bodyparts:
                b.isRoot = 0
            self.root = random.choice(self.bodyparts)
            self.root.isRoot = 1
        assert len([x for x in self.bodyparts if x.isRoot == 1]) == 1
        # remove input_map entries that are invalid
        for bp in self.bodyparts:
            if bp.input_map:
                # we need to keep a list and erase at the end otherwise we fall into
                # the trap of removing items for a mutable list whilst iterating
                # over it
                for (tneuron, srclist) in bp.input_map.items():
                    if tneuron not in bp.network.inputs:
                        del bp.input_map[tneuron]
                    else:
                        for (sbp, sneuron, w) in srclist[:]:
                            if sbp not in self.bodyparts or sneuron not in sbp.network:
                                srclist.remove((sbp, sneuron, w))
        for bp in self.bodyparts:
            if bp.input_map:
                for (tneuron, srclist) in bp.input_map.items():
                    for (sbp, sneuron, w) in srclist:
                        assert sbp in self.bodyparts

        # check whether input_map entries are still valid
        for bp in self.bodyparts:
            if bp.input_map:
                krm = []
                for k in bp.input_map.keys():
                    if k not in self.bodyparts:
                        krm.append(k)
                    else:
                        # key is valid
                        toremove = []
                        for (sbp, sig, w) in bp.input_map[k]:
                            # check sbp is ok and src is a string or output node
                            if sbp not in self.bodyparts or (
                                    isinstance(sig, node.Node)
                                    and sig not in sbp.network.outputs):
                                toremove.append((sbp, sig, w))
                        for x in toremove:
                            bp.input_map[k].remove(x)
                for k in krm:
                    del bp.input_map[k]

        self.connectInputNodes()
        self.sanityCheck()

    def mutate_delete_edges(self, p):
        "Randomly erase edges in this BodyPartGraph with probability p"
        # NOTE: self.mutations is initialised by mutate(); calling the
        # mutate_* helpers directly would raise AttributeError.
        for bp in self.bodyparts:
            for i in range(len(bp.edges) - 1, -1, -1):
                if random.random() < p:
                    # delete edge
                    log.debug('delete edge')
                    self.mutations += 1
                    del bp.edges[i]
                    self.fixup()
                    self.sanityCheck()

    def mutate_add_edges(self, p):
        "Randomly add edges in this BodyPartGraph with probability p"
        for s_bp in self.bodyparts:
            if random.random() < p:
                #and len(self.bodyparts) < BPG_MAX_EDGES:
                # add edge
                log.debug('add edge')
                self.mutations += 1
                t_bp = random.choice(self.bodyparts)
                e = Edge(t_bp, random.choice([-1, 1]), random.choice([0, 1]))
                s_bp.edges.append(e)
                # we now have new nodes in the unrolled bpg which don't have
                # entries in their genotype bp for their neighbours, so fixup
                self.fixup()
                self.sanityCheck()

    def mutate_delete_nodes(self, p):
        "Randomly delete nodes in this BodyPartGraph with probability p"
        for i in range(len(self.bodyparts) - 1, -1, -1):
            if random.random() < p and len(self.bodyparts) > 1:
                # delete node
                log.debug('delete node')
                self.mutations += 1
                bp_del = self.bodyparts[i]
                # delete all edges pointing to this node
                for bp in self.bodyparts:
                    edges_to_remove = []
                    for e in bp.edges:
                        if e.child == bp_del:
                            edges_to_remove.append(e)
                    for e in edges_to_remove:
                        bp.edges.remove(e)
                self.bodyparts.remove(bp_del)
                if bp_del == self.root:
                    self.root = random.choice(self.bodyparts)
                    self.root.isRoot = 1
                self.fixup()
                self.sanityCheck()

    def mutate_copy_nodes(self, p):
        "Randomly copy nodes in this BodyPartGraph with probability p"
        for i in range(len(self.bodyparts)):
            if random.random() < p and len(self.bodyparts) < BPG_MAX_NODES:
                # copy and mutate node
                log.debug('copy node')
                self.mutations += 1
                c = copy.deepcopy(self.bodyparts[i])
                # we did in fact just copy everything the bp links to ...
                # fixme: correct? yes? efficient? probably not.
                c.edges = PersistentList()
                c.mutate(p)
                self.bodyparts.append(c)

                # random incoming edges
                # (note: rebinds the outer loop variable i)
                i = random.randint(1, len(self.bodyparts) / 2)
                for _ in range(i):
                    # add edges
                    e = Edge(c, random.choice([-1, 1]), random.choice([0, 1]))
                    s_bp = random.choice(self.bodyparts)
                    s_bp.edges.append(e)

                # random outgoing edges
                i = random.randint(1, len(self.bodyparts) / 2)
                for _ in range(i):
                    # add edges
                    t_bp = random.choice(self.bodyparts)
                    e = Edge(t_bp, random.choice([-1, 1]),
                             random.choice([0, 1]))
                    c.edges.append(e)
                self.fixup()
                self.sanityCheck()

    def mutate_inputmaps(self, p):
        "Randomly rewire input_maps in each BodyPart with probability p"
        for bp in self.bodyparts:
            for _ in range(len(bp.input_map)):
                if random.random() < p:
                    log.debug('mutate input_map')
                    self.mutations += 1
                    di = random.choice(bp.input_map.keys())
                    if random.random() < 0.5:
                        del bp.input_map[di]
                    else:
                        # mutate weight
                        xp = random.randrange(0, len(bp.input_map[di]))
                        x = list(bp.input_map[di][xp])
                        x[2] = rnd(-7, 7, x[2])
                        bp.input_map[di][xp] = tuple(x)

        self.connectInputNodes()
        self.sanityCheck()

    def mutate(self, p):
        "Mutate the BodyPartGraph nodes, edges, and all parameters."
        log.debug('bpg.mutate(p=%f)', p)

        self.sanityCheck()
        self.mutations = 0  # running total, incremented by the helpers
        self.mutate_delete_edges(p)
        self.mutate_add_edges(p)
        self.mutate_delete_nodes(p)
        self.mutate_copy_nodes(p)
        self.mutate_inputmaps(p)

        # FIXME: mutate number of input and output nodes
        # mutate motors and sensors?
        self.sanityCheck()
        for bp in self.bodyparts:
            # mutate individual parameters
            self.mutations += bp.mutate(p)
            # since bp mutate can change the topology of the unrolled graph via
            # recursive_limit, we need to fix up external_input and maybe others
            self.fixup()
            self.sanityCheck()

        self.sanityCheck()

        log.debug('/bpg.mutate')
        return self.mutations
Esempio n. 14
0
class PackRat(Mob):
    'Collects things and brings them back to its nest.'

    name_aka = ['rat']

    name = 'packrat'
    short = 'A large scruffy rat. Is it carrying something?'

    def __init__(self, name='', short='', long=''):
        Mob.__init__(self, name, short, long)
        self._path_home = PersistentList()
        self._searching = True
        self._has_dug_home = False
        self.set_action_weights(action_move=2000)
        self.period = 5 # seconds

    def near_drop(self, info):
        dropper = info['actor']
        if dropper is not self:
            if (self._searching and self.room!=self.home
                            and not self.items() and self.awake):
                item = info['item']
                if item in self.room:
                    self.get_item(item, self.room)
                    self._searching = False

                    if not self._has_dug_home:
                        self._dig_home()

    def action_move(self):
        x = self._choose_exit()
        if x is not None:
            self._move(x)
            if self._searching and x.destination!=self.home and not self.items():
                item = self._search()
                if item and not self._has_dug_home:
                    self._dig_home()
            if not self._searching and x.destination==self.home:
                self._store_item()

    def _choose_exit(self):
        origin = self.room
        exits = filter(self.can_see, origin.exits())
        x = None
        if exits:
            if self._searching:
                x = random.choice(exits)
                if x.locked:
                    x = None
            else:
                for x in exits:
                    try:
                        if x.destination == self._path_home[-2]:
                            break
                    except KeyError:
                        print 'No -2'
        return x

    def _move(self, x):
        success, msg = self.go(x)
        if success:
            dest = x.destination
            if dest not in self._path_home:
                self._path_home.append(dest)
            else:
                i = self._path_home.index(dest)
                l = len(self._path_home)
                if i+1 < l:
                    for d in range(i+1, l):
                        del self._path_home[-1]

    def _search(self):
        items = filter(self.can_see, self.room.items())
        if items:
            item = items[0]
            self.get_item(item, self.room)
            self._searching = False
            return item
        else:
            return None

    def _dig_home(self):
        home = rooms.Room('rat nest', "The rat's nest.")
        x = exits.Exit('hole', 'A roughly dug hole.',
                        room=self.room,
                        destination=home, return_name='exit')
        self.home = home
        for i in range(len(self._path_home)):
            self._path_home.pop()
        self._path_home.append(home)
        self._path_home.append(self.room)
        self.room.action(dict(act='dig', actor=self, exit=x))
        self._has_dug_home = True

    def _store_item(self):
        item = self.items()[0]
        self.drop_item(item)
        self._searching = True
Esempio n. 15
0
class PersistentOrderedContainer(PersistentContainer):
    """PersistentContainer that remembers the insertion order of children."""

    def __init__(self, name, parent=None):
        PersistentContainer.__init__(self, name, parent)
        self.__children = PersistentList()

    def index(self, item):
        """Return the ordered position of item."""
        return self.__children.index(item)

    def get_children(self, index):
        """Return the child (or slice of children) at index."""
        return self.__children[index]

    @property
    def children(self):
        """Ordered list (a copy) of contained children."""
        return list(self.__children)

    @children.setter
    def children(self, children):
        # NOTE(review): this validates via child.name while add()/insert()
        # key on item.__name__ -- confirm the two are always equivalent.
        if len(children) != len(self):
            raise ValueError('len(children) and len(self) must be equal')
        for child in children:
            if not child.name in self:
                raise ValueError('children and self must ' \
                                 'contain the same objects')
        self.__children = PersistentList(children)

    def add(self, item):
        """Add item, appending it to the child order."""
        if item.__name__ not in self:
            self.__children.append(item)
        else:
            raise ValueError('The container already contains this item')
        PersistentContainer.add(self, item)

    append = add

    def insert(self, index, item):
        """Insert item at the given position in the child order."""
        if item.__name__ not in self:
            self.__children.insert(index, item)
        else:
            raise ValueError('The container already contains this item')
        # bugfix: register with the base container via add(), consistent
        # with add() above; the base-class API is add(), not append()
        PersistentContainer.add(self, item)

    def __setitem__(self, name, item):
        already_in_children = name in self
        PersistentContainer.__setitem__(self, name, item)
        if not already_in_children:
            self.__children.append(item)

    def __delitem__(self, name):
        if name in self:
            self.__children.remove(self[name])
        PersistentContainer.__delitem__(self, name)

    def __iter__(self):
        return self.iterkeys()

    def keys(self):
        """Ordered child names."""
        return [child.name for child in self.__children]

    def values(self):
        """Children in order."""
        return [child for child in self.__children]

    def items(self):
        """Ordered (name, child) pairs."""
        return [(child.name, child) for child in self.__children]

    def iterkeys(self):
        for child in self.__children:
            yield child.name

    def itervalues(self):
        for child in self.__children:
            yield child

    def iteritems(self):
        for child in self.__children:
            yield child.name, child
Esempio n. 16
0
class PersistentOrderedContainer(PersistentContainer):
    """PersistentContainer that remembers the insertion order of children."""

    def __init__(self, name, parent=None):
        PersistentContainer.__init__(self, name, parent)
        self.__children = PersistentList()

    def index(self, item):
        """Return the ordered position of item."""
        return self.__children.index(item)

    def get_children(self, index):
        """Return the child (or slice of children) at index."""
        return self.__children[index]

    @property
    def children(self):
        """Ordered list (a copy) of contained children."""
        return list(self.__children)

    @children.setter
    def children(self, children):
        # NOTE(review): this validates via child.name while add()/insert()
        # key on item.__name__ -- confirm the two are always equivalent.
        if len(children) != len(self):
            raise ValueError('len(children) and len(self) must be equal')
        for child in children:
            if not child.name in self:
                raise ValueError('children and self must ' \
                                 'contain the same objects')
        self.__children = PersistentList(children)

    def add(self, item):
        """Add item, appending it to the child order."""
        if item.__name__ not in self:
            self.__children.append(item)
        else:
            raise ValueError('The container already contains this item')
        PersistentContainer.add(self, item)
    append = add

    def insert(self, index, item):
        """Insert item at the given position in the child order."""
        if item.__name__ not in self:
            self.__children.insert(index, item)
        else:
            raise ValueError('The container already contains this item')
        # bugfix: register with the base container via add(), consistent
        # with add() above; the base-class API is add(), not append()
        PersistentContainer.add(self, item)

    def __setitem__(self, name, item):
        already_in_children = name in self
        PersistentContainer.__setitem__(self, name, item)
        if not already_in_children:
            self.__children.append(item)

    def __delitem__(self, name):
        if name in self:
            self.__children.remove(self[name])
        PersistentContainer.__delitem__(self, name)

    def __iter__(self):
        return self.iterkeys()

    def keys(self):
        """Ordered child names."""
        return [child.name for child in self.__children]

    def values(self):
        """Children in order."""
        return [child for child in self.__children]

    def items(self):
        """Ordered (name, child) pairs."""
        return [(child.name, child) for child in self.__children]

    def iterkeys(self):
        for child in self.__children:
            yield child.name

    def itervalues(self):
        for child in self.__children:
            yield child

    def iteritems(self):
        for child in self.__children:
            yield child.name, child
Esempio n. 17
0
class RecordContainer(Persistent):
    """
    Base/default record container uses PersistentDict for entry storage
    and PersistentList to store ordered keys.  This base container class
    does not advocate one place of storage for the container in a ZODB
    over another, so subclass implementations may choose to implement a
    container within a placeful (e.g. OFS or CMF Content item) or placeless
    (local utility) storage context.  Only a placeless context is supported
    by direct users of this class (without subclassing).

    For a container with hundreds of items or more, consider using instead
    BTreeRecordContainer as an implementation or base class, as it should
    handle memory usage and insert performance much better for larger sets
    of records.

    Usage
    -----

    RecordContainer acts as CRUD controller for working with records.

    The RecordContainer is an addressable object in the system, either as a
    registered utility (or with a subclass as "contentish" (CMF) content).

    Records themselves are not content, but data that are possibly
    non-atomic elements of an atomic content item (if the container is
    implemented in a subclass of RecordContainer as contentish).

    Usage:
    ------

    We need a record container object:

    >>> from uu.record.base import Record, RecordContainer
    >>> container = RecordContainer()
    >>> from uu.record.interfaces import IRecordContainer
    >>> assert IRecordContainer.providedBy(container)

    Record containers have length and containment checks:

    >>> assert len(container) == 0
    >>> import uuid  # keys for entries are stringified UUIDs
    >>> randomuid = str(uuid.uuid4())
    >>> assert randomuid not in container
    >>> assert container.get(randomuid, None) is None

    And they have keys/values/items methods like a mapping:

    >>> assert container.keys() == ()
    >>> assert container.values() == ()
    >>> assert container.items() == ()  # of course, these are empty now.

    Before we add records to a container, we need to create them; there are
    two possible ways to do this:

    >>> from uu.record.base import Record
    >>> entry1 = Record()
    >>> entry2 = container.create()  # preferred factory

    Both factory mechanisms create an entry item with a record_uid attribute:

    >>> from uu.record.interfaces import IRecord
    >>> assert IRecord.providedBy(entry1)
    >>> assert IRecord.providedBy(entry2)
    >>> is_uuid = lambda u: isinstance(u, str) and len(u) == 36
    >>> assert is_uuid(entry1.record_uid)
    >>> assert is_uuid(entry2.record_uid)

    And, these are RFC 4122 UUIDs, so even randomly generated 128-bit ids
    have near zero chance of collision:

    >>> assert entry1.record_uid != entry2.record_uid
    >>> assert entry2.record_uid != randomuid

    The record objects provide plone.uuid.interfaces.IAttributeUUID as an
    alternative way to get the UUID value (string representation) by
    adapting to IUUID:

    >>> from zope.configuration import xmlconfig
    >>> import plone.uuid
    >>> c = xmlconfig.file('configure.zcml', plone.uuid)  # load registrations
    >>> from plone.uuid.interfaces import IUUID, IAttributeUUID
    >>> from zope.component import queryAdapter
    >>> assert IAttributeUUID.providedBy(entry1)
    >>> assert queryAdapter(entry1, IUUID) is not None
    >>> assert queryAdapter(entry1, IUUID) == entry1.record_uid

    Now when we have a parent context with a schema, the created entries will
    be signed with the schema and provide it.

    RecordContainer.create() is the preferred factory when processing data.
    This is because it can take a mapping of keys/values, and copy each
    field name/value onto object attributes -- if and only if the attribute
    in question matches a type whitelist and a name blacklist filter.

    >>> entry4 = container.create(data={'record_uid':randomuid})
    >>> assert entry4.record_uid == randomuid
    >>> entry5 = container.create(data={'count':5})
    >>> assert entry5.count == 5
    >>> entry6 = container.create(data={'_bad_name'    : True,
    ...                                  'count'        : 2,
    ...                                  'bad_value'    : lambda x: x })
    >>> assert not hasattr(entry6, '_bad_name')  # no leading underscores
    >>> assert entry6.count == 2
    >>> assert not hasattr(entry6, 'bad_value')  # function not copied!

    Of course, merely using the record container object as a factory for
    new records does not mean they are stored within (yet):

    >>> assert entry4.record_uid not in container
    >>> assert entry4.record_uid not in container.keys()

    Let's add an item:

    >>> container.add(entry4)

    There are two ways to check for containment, by either key or value:

    >>> assert entry4 in container
    >>> assert entry4.record_uid in container

    We can get records using a (limited, read) mapping-like interface:

    >>> assert len(container) == 1  # we just added the first entry
    >>> assert container.values()[0] is entry4
    >>> assert container.get(entry4.record_uid) is entry4
    >>> assert container[entry4.record_uid] is entry4

    We can deal with references to entries also NOT in the container:

    >>> import uuid
    >>> randomuid = str(uuid.uuid4())
    >>> assert randomuid not in container
    >>> assert container.get(str(uuid.uuid4()), None) is None
    >>> assert entry1.record_uid not in container

    And we can check containment on either an instance or a UID; checking on
    an instance is just a convenience that uses its UID (record_uid) field
    to check for actual containment:

    >>> assert entry4.record_uid in container
    >>> assert entry4 in container  # shortcut!

    However, it should be noted for good measure:

    >>> assert entry4 in container.values()
    >>> assert entry4.record_uid in container.keys()
    >>> assert entry4 not in container.keys()  # of course!
    >>> assert (entry4.record_uid, entry4) in container.items()

    We can modify a record contained directly; this is the most direct and
    low-level update interface for any entry:

    >>> _marker = object()
    >>> assert getattr(entry4, 'title', _marker) is _marker
    >>> entry4.title = u'Curious George'
    >>> assert container.get(entry4.record_uid).title == u'Curious George'

    We can add another record:

    >>> container.add(entry6)
    >>> assert entry6 in container
    >>> assert entry6.record_uid in container
    >>> assert len(container) == 2

    Keys, values, items are always ordered; since we added entry4, then
    entry6 previously, they will return in that order:

    >>> expected_order = (entry4, entry6)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    We can re-order this; let's move entry6 up to position 0 (first):

    >>> container.reorder(entry6, offset=0)
    >>> expected_order = (entry6, entry4)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    We can also re-order by UID instead of record/entry reference:

    >>> container.reorder(entry6.record_uid, offset=1)  # where it was before
    >>> expected_order = (entry4, entry6)
    >>> expected_uid_order = tuple([e.record_uid for e in expected_order])
    >>> expected_items_order = tuple(zip(expected_uid_order, expected_order))
    >>> assert tuple(container.keys()) == expected_uid_order
    >>> assert tuple(container.values()) == expected_order
    >>> assert tuple(container.items()) == expected_items_order

    And we can remove records from containment by UID or by reference (note,
    del(container[key]) uses __delitem__ since a container is a writable
    mapping):

    >>> del(container[entry6])
    >>> assert entry6 not in container
    >>> assert entry6.record_uid not in container
    >>> assert len(container) == 1
    >>> assert entry4 in container
    >>> del(container[entry4.record_uid])
    >>> assert entry4 not in container
    >>> assert len(container) == 0

    Earlier, direct update of objects was demonstrated: get an object and
    modify its properties.  This attribute-setting mechanism is the best
    low-level interface, but it does not (a) support a wholesale update
    from either a field dictionary/mapping nor another object providing
    IRecord needing its data to be copied; nor (b) support notification
    of zope.lifecycle object events.

    Given these needs, a high level interface for update exists, with the
    record object acting as a controller for updating contained entries.
    This provides for update via another entry (a field-by-field copy) or
    from a data dictionary/mapping.

    >>> newuid = str(uuid.uuid4())
    >>> data = {    'record_uid' : newuid,
    ...             'title'      : u'George',
    ...             'count'      : 9,
    ...        }
    >>> assert len(container) == 0  # empty, nothing in there yet!
    >>> assert newuid not in container

    Note, update() returns an entry; return value can be ignored if caller
    deems it not useful.

    >>> entry = container.update(data)
    >>> assert newuid in container  # update implies adding!
    >>> assert entry is container.get(newuid)
    >>> assert entry.title == data['title']
    >>> assert entry.count == data['count']

    Now, the entry we just modified was also added.  We can modify it again:

    >>> data = {    'record_uid' : newuid,
    ...             'title'      : u'Curious George',
    ...             'count'      : 2,
    ...        }
    >>> entry = container.update(data)
    >>> assert newuid in container     # same uid
    >>> entry.title
    u'Curious George'
    >>> entry.count
    2
    >>> assert len(container) == 1     # same length, nothing new was added.

    We could also create a stand-in entry for which data is copied to the
    permanent entry with the same UUID on update:

    >>> temp_entry = container.create()
    >>> temp_entry.record_uid = newuid      # overwrite with the uid of entry
    >>> temp_entry.title = u'Monkey jumping on the bed'
    >>> temp_entry.count = 0

    temp_entry is a stand-in which we will pass to update(), when we really
    intend to modify entry (they have the same UID):

    >>> real_entry = container.update(temp_entry)
    >>> assert container.get(newuid) is not temp_entry
    >>> assert container.get(newuid) is entry  # still the same object...
    >>> assert container.get(newuid) is real_entry
    >>> entry.title                             # ...but data is modified!
    u'Monkey jumping on the bed'
    >>> entry.count
    0
    >>> assert len(container) == 1     # same length, nothing new was added.


    JSON integration
    ----------------

    As a convenience, update_all() parses JSON into a data dict for use by
    update(), using the Python 2.6 json library (aka/was: simplejson):

    >>> party_form = RecordContainer()
    >>> entry = party_form.create()
    >>> party_form.add(entry)
    >>> data = {  # mock data we'll serialize to JSON
    ...     'record_uid': entry.record_uid,  # which record to update
    ...     'name'      : 'Me',
    ...     'birthday'  : u'77/06/01',
    ...     'party_time': u'11/06/05 12:00',
    ...     }
    >>> import json  # requires Python >= 2.6
    >>> data['name'] = 'Chunky monkey'
    >>> serialized = json.dumps([data,], indent=2)  # JSON array of one item...
    >>> print serialized  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    [
      {
        "party_time": "11/06/05 12:00",
        "birthday": "77/06/01",
        "name": "Chunky monkey",
        "record_uid": "..."
      }
    ]

    The JSON created above is useful enough for demonstration, despite being
    only a single-item list.

    >>> assert getattr(entry, 'name', _marker) is _marker  # before, no attr
    >>> party_form.update_all(serialized)
    >>> entry.name  # after update
    u'Chunky monkey'

    update_all() also takes a singular record, not just a JSON array:

    >>> data['name'] = 'Curious George'
    >>> serialized = json.dumps(data, indent=2)  # JSON object, not array.
    >>> print serialized  # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    {
      "party_time": "11/06/05 12:00",
      "birthday": "77/06/01",
      "name": "Curious George",
      "record_uid": "..."
    }
    >>> entry.name  # before
    u'Chunky monkey'
    >>> party_form.update_all(serialized)
    >>> entry.name  # after update
    u'Curious George'

    JSON parsing also supports a "bundle" or wrapper object around a list of
    entries, where the wrapper contains metadata about the form itself, not
    its entries (currently, this is just the process_changes field, which
    is sourced from the JSON bundle/wrapper object field called 'notes').
    When wrapped, the list of entries is named 'entries' inside the wrapper.

    >>> data['name'] = u'Party monkey'
    >>> serialized = json.dumps({'notes'    : 'something changed',
    ...                          'entries'  : [data,]},
    ...                         indent=2)  # JSON array of one item...
    >>> entry.name  # before
    u'Curious George'
    >>> party_form.update_all(serialized)
    >>> entry.name  # after
    u'Party monkey'

    It should be noted that update_all() removes entries not in the data
    payload, and it preserves the order contained in the JSON entries.

    Object events
    -------------

    CRUD methods on a controlling object should have some means of extension,
    pluggable to code that should subscribe to CRUD (object lifecycle) events.
    We notify four distinct zope.lifecycleevent object event types:

    1. Object created (zope.lifecycleevent.interfaces.IObjectCreatedEvent)

    2. Object added to container:
        (zope.lifecycleevent.interfaces.IObjectAddedEvent).

    3. Object modified (zope.lifecycleevent.interfaces.IObjectModifiedEvent)

    4. Object removed (zope.lifecycleevent.interfaces.IObjectRemovedEvent)

    Note: the create() operation both creates and modifies: as such, both
    created and modified events are fired off, and since most creations also
    are followed by an add() to a container, you may have three events to
    subscribe to early in a new entry's lifecycle.

    First, some necessary imports of events and the @adapter decorator:

    >>> from zope.component import adapter
    >>> from zope.lifecycleevent import IObjectCreatedEvent
    >>> from zope.lifecycleevent import IObjectModifiedEvent
    >>> from zope.lifecycleevent import IObjectRemovedEvent
    >>> from zope.lifecycleevent import IObjectAddedEvent

    Let's define dummy handlers:

    >>> @adapter(IRecord, IObjectCreatedEvent)
    ... def handle_create(context, event):
    ...     print 'object created'
    ...
    >>> @adapter(IRecord, IObjectModifiedEvent)
    ... def handle_modify(context, event):
    ...     print 'object modified'
    ...
    >>> @adapter(IRecord, IObjectRemovedEvent)
    ... def handle_remove(context, event):
    ...     print 'object removed'
    ...
    >>> @adapter(IRecord, IObjectAddedEvent)
    ... def handle_add(context, event):
    ...     print 'object added'
    ...

    Next, let's configure zope.event to use zope.component event
    subscribers; most frameworks using zope.lifecycleevent already do
    this, but we will configure this explicitly for documentation
    and testing purposes, only if not already enabled:

    >>> import zope.event
    >>> from zope.component import getGlobalSiteManager
    >>> gsm = getGlobalSiteManager()

    Importing zope.component.event puts dispatch() in zope.event.subscribers:

    >>> from zope.component import event
    >>> assert event.dispatch in zope.event.subscribers
    
    Now, let's register the handlers:

    >>> for h in (handle_create, handle_modify, handle_remove, handle_add):
    ...     gsm.registerHandler(h)
    ...

    Usually, these handlers will be registered in the global site manager
    via ZCML and zope.configuration, but they are registered in Python
    above for documentation/testing purposes.

    We can watch these event handlers get fired when CRUD methods are called.

    Object creation, with and without data:

    >>> newentry = container.create()      # should print 'object created'
    object created
    >>> another_uid = str(uuid.uuid4())
    >>> newentry = container.create({'count':88})
    object modified
    object created

    Object addition:

    >>> container.add(newentry)
    object added
    >>>

    Object removal:

    >>> del(container[newentry.record_uid])  # via __delitem__()
    object removed

    Object update (existing object):

    >>> entry = container.values()[0]
    >>> entry = container.update({'record_uid' : entry.record_uid,
    ...                            'title'      : u'Me'})
    object modified

    Object modified (new object or not contained):

    >>> random_uid = str(uuid.uuid4())
    >>> entry = container.update({'record_uid' : random_uid,
    ...                            'title'      : u'Bananas'})
    object modified
    object created
    object added

    Event handlers for modification can know what fields are modified; let's
    create a more interesting modification handler that prints the names of
    changed fields.

    >>> from zope.lifecycleevent.interfaces import IAttributes
    >>> unregistered = gsm.unregisterHandler(handle_modify)
    >>> @adapter(IRecord, IObjectModifiedEvent)
    ... def handle_modify(context, event):
    ...     if event.descriptions:
    ...         attr_desc = [d for d in event.descriptions
    ...                         if (IAttributes.providedBy(d))]
    ...         if attr_desc:
    ...             field_names = attr_desc[0].attributes
    ...         print tuple(field_names)
    >>> gsm.registerHandler(handle_modify)

    >>> entry = container.values()[0]
    >>> entry = container.update({'record_uid' : entry.record_uid,
    ...                            'title'      : u'Hello'})
    ('title',)

    Finally, clean up and remove all the dummy handlers:
    >>> for h in (handle_create, handle_modify, handle_remove, handle_add):
    ...     success = gsm.unregisterHandler(h)
    ...

    """

    # Declare that instances provide IRecordContainer (zope.interface).
    implements(IRecordContainer)

    # whitelist types of objects to copy on data update:

    # NOTE(review): time.time is a builtin *function*, not a type, so it
    # can never match type(value) in _type_whitelist_validation -- was
    # datetime.time intended?  Confirm against the module's imports.
    TYPE_WHITELIST = (int,
                      long,
                      str,
                      unicode,
                      bool,
                      float,
                      time.time,
                      datetime,
                      date,
                      timedelta,
                      decimal.Decimal,)

    # sequence containers whose elements must be TYPE_WHITELIST types:
    SEQUENCE_WHITELIST = (list, tuple, set, frozenset, PersistentList,)

    # mapping containers whose keys/values must be TYPE_WHITELIST types:
    MAPPING_WHITELIST = (dict, PersistentDict,)

    # interface every stored record must provide:
    RECORD_INTERFACE = IRecord

    # default record factory used by create(); overridable via __init__:
    factory = Record

    def __init__(self, factory=Record, _impl=PersistentDict):
        """Set up empty entry storage (_impl mapping) and key ordering."""
        self.factory = factory
        self._order = PersistentList()
        self._entries = _impl()

    # IWriteContainer methods:

    def _update_size(self):
        # Cache the record count so __len__ need not recount _order each
        # call; _p_changed flags the ZODB persistence machinery that this
        # object's state changed and must be re-stored.
        self._size = len(self._order)
        self._p_changed = True

    def __setitem__(self, key, value):
        """
        Store a record value under a string-UUID key, appending the key
        to the ordering when it is new.

        Raises KeyError for keys that are not 36-character UUID strings
        and ValueError for values not providing RECORD_INTERFACE.
        """
        if isinstance(key, uuid.UUID) or isinstance(key, unicode):
            key = str(key)
        elif not (isinstance(key, str) and len(key) == 36):
            # bugfix: interpolate the key into the message; previously a
            # (format, key) tuple was passed to KeyError unformatted
            raise KeyError('key does not appear to be string UUID: %s' % key)
        if not self.RECORD_INTERFACE.providedBy(value):
            raise ValueError('Record value must provide %s' % (
                self.RECORD_INTERFACE.__identifier__))
        self._entries[key] = value
        if key not in self._order:
            self._order.append(key)
            self._update_size()

    def __delitem__(self, record):
        """
        Remove a record given either an IRecord object, a uuid.UUID, or a
        36-character string UID; notifies ObjectRemovedEvent with the
        removed record object.
        """
        uid = record
        if self.RECORD_INTERFACE.providedBy(record):
            uid = str(record.record_uid)
        elif isinstance(record, uuid.UUID):
            uid = str(record)
        # by now uid must have been normalized to a string UUID
        if not (isinstance(uid, str) and len(uid) == 36):
            raise ValueError('record neither record object nor UUID')
        if uid not in self._entries:
            raise ValueError('record not found contained within')
        if uid in self._order:
            self._order.remove(uid)
            self._update_size()
        if not self.RECORD_INTERFACE.providedBy(record):
            record = self._entries.get(uid)  # need ref for event notify below
        del(self._entries[uid])
        notify(ObjectRemovedEvent(record, self, uid))

    # IRecordContainer and IOrdered re-ordering methods:

    def reorder(self, record, offset):
        """
        Move a record (either a UUID or an object with a record_uid
        attribute) to position offset within self._order.  Offset is
        coerced to a non-negative integer; an unknown UID raises
        ValueError.
        """
        uid = record
        target = abs(int(offset))
        if self.RECORD_INTERFACE.providedBy(record):
            uid = record.record_uid
        if not uid or uid not in self._order:
            raise ValueError('cannot find record to move for id %s' % uid)
        current = self._order.index(uid)
        self._order.insert(target, self._order.pop(current))

    def updateOrder(self, order):
        """Provides zope.container.interfaces.IOrdered.updateOrder"""
        if len(order) != len(self._order):
            raise ValueError('invalid number of keys')
        unique = set(order)
        if len(unique) != len(order):
            raise ValueError('duplicate keys in order')
        if unique - set(self._order):
            raise ValueError('unknown key(s) provided in order')
        if isinstance(order, PersistentList):
            self._order = order
        else:
            self._order = PersistentList(order)

    # IReadContainer interface methods:

    def get(self, uid, default=None):
        """
        Return the record for UUID uid (or an IRecord object whose
        record_uid is used), else default.
        """
        if self.RECORD_INTERFACE.providedBy(uid):
            # special case so __contains__() can pass a record object
            uid = uid.record_uid
        record = self._entries.get(str(uid), default)
        if record and getattr(record, '_v_parent', None) is None:
            # container marks the item with itself as context
            record._v_parent = self
        return record

    def __contains__(self, record):
        """
        Given record as either IRecord object or UUID, is record contained?
        """
        if not self.RECORD_INTERFACE.providedBy(record):
            return str(record) in self._entries
        return self.get(record, None) is not None

    def __len__(self):
        """Return the number of contained record entries."""
        cached = getattr(aq_base(self), '_size', None)
        if cached is None:
            return len(self._order)
        return cached

    def __getitem__(self, key):
        """Get item by UID key; raise KeyError when absent."""
        record = self.get(key, None)
        if record is not None:
            return record
        raise KeyError('unknown UID for record entry')

    def keys(self):
        """Return a tuple of contained UUID keys, in order."""
        return tuple(uid for uid in self._order)

    def values(self):
        """Return a tuple of contained records, in order."""
        return tuple(record for _, record in self.items())

    def items(self):
        """Return ordered (uid, record) pairs as a tuple."""
        return tuple((uid, self.get(uid)) for uid in self._order)

    def __iter__(self):
        """Iterate over UUID keys in order."""
        return iter(self._order)

    # IRecordContainer-specific CRUD methods:

    def _type_whitelist_validation(self, value):
        """
        Raise ValueError unless value's type -- and, for whitelisted
        mapping/sequence containers, the type of each contained key and
        value -- appears in the configured whitelists.
        """
        vtype = type(value)
        if vtype in self.MAPPING_WHITELIST:
            for k, v in value.items():
                # bugfix: compare the *types* of key/value against the
                # whitelist; the objects themselves are never members of
                # a tuple of types, so the old check rejected everything
                if not (type(k) in self.TYPE_WHITELIST and
                        type(v) in self.TYPE_WHITELIST):
                    raise ValueError('Unsupported mapping key/value type')
        elif vtype in self.SEQUENCE_WHITELIST:
            for v in value:
                # bugfix: same type-vs-value confusion as above
                if type(v) not in self.TYPE_WHITELIST:
                    raise ValueError('Unsupported sequence value type')
        else:
            if vtype not in self.TYPE_WHITELIST:
                raise ValueError('Unsupported data type')

    def _populate_record(self, record, data):
        """
        Given mapping of data, copy values to attributes on record.
        Underscore-prefixed keys are skipped; values failing the type
        whitelist are silently ignored.  Notifies IObjectModifiedEvent
        with per-attribute descriptions when anything actually changed.

        Subclasses may override to provide schema validation, selective
        copy of names, and normalization of values if/as necessary.
        """
        changelog = []
        for key, value in data.items():
            if key.startswith('_'):
                continue  # invalid key
            if key == 'record_uid':
                # fix: set the UID on the record being populated, not on
                # the container (self)
                record.record_uid = str(value)
                continue
            try:
                self._type_whitelist_validation(value)
            except ValueError:
                continue  # skip problem name!
            # fix: compare against the record's current value, not the
            # container's attribute of the same name
            existing_value = getattr(record, key, None)
            if value != existing_value:
                changelog.append(key)
                setattr(record, key, value)
        if changelog:
            record._p_changed = True  # mark persistent object dirty
            changelog = [
                Attributes(self.RECORD_INTERFACE, name)
                for name in changelog
                ]
            notify(ObjectModifiedEvent(record, *changelog))

    def create(self, data=None):
        """
        Alternative factory for an IRecord object; the result is NOT
        stored in the container.  When data is a non-empty mapping its
        fields are copied onto the new record.  Notifies
        IObjectCreatedEvent for the new record.
        """
        if data is None:
            data = {}
        # honor a caller-supplied UID when present, else generate one
        uid = data.get('record_uid', str(uuid.uuid4()))
        record = self.factory(context=self, uid=uid)
        looks_like_mapping = hasattr(data, 'get') and hasattr(data, 'items')
        if data and looks_like_mapping:
            self._before_populate(record, data)
            self._populate_record(record, data)
        notify(ObjectCreatedEvent(record))
        return record

    def add(self, record):
        """
        Add a record to the container, appending its UUID to the end of
        the order.  An existing entry for the same UUID is overwritten,
        leaving the order as-is.  Notifies IObjectAddedEvent.
        """
        uid = str(record.record_uid)
        if not uid:
            raise ValueError('record has empty UUID')
        self._entries[uid] = record
        already_ordered = uid in self._order
        if not already_ordered:
            self._order.append(uid)
            self._update_size()
        notify(ObjectAddedEvent(record, self, uid))

    def _ad_hoc_fieldlist(self, record):
        """
        Return names of public attributes on record whose values pass
        the container's type whitelist validation.
        """
        fieldnames = []
        for name in dir(record):
            if name.startswith('_'):
                continue
            try:
                self._type_whitelist_validation(getattr(record, name))
            except ValueError:
                continue  # unsupported value type; omit this name
            fieldnames.append(name)
        return fieldnames

    def _filtered_data(self, data):
        """
        Return a dict of whitelisted field name/value pairs from data,
        which may be either a record object or a mapping.
        """
        fieldnames = self._ad_hoc_fieldlist(data)
        # consistency fix: use self.RECORD_INTERFACE like every other
        # method here (get, __contains__, update); previously referenced
        # the global IRecord directly, bypassing subclass customization
        if self.RECORD_INTERFACE.providedBy(data):
            return dict([(k, getattr(data, k, None)) for k in fieldnames])
        return dict([(k, data.get(k, None)) for k in fieldnames])

    def _before_populate(self, record, data):
        """Subclass hook, called just before copying data onto record."""

    def _before_update_notification(self, record, data):
        """Subclass hook, called after populate but before change events."""

    def notify_data_changed(self):
        """Notify a containerwide IObjectModifiedEvent for 'items'."""
        description = Attributes(IRecordContainer, 'items')
        notify(ObjectModifiedEvent(self, description))

    def update(self, data, suppress_notify=False):
        """
        Given data, which may be a dict of field key/values or an actual
        IRecord providing object, update existing entry given a UUID, or
        add the entry if an entry for that UUID does not yet exist.  The
        update should copy all values for every key provided.  Specialized
        or schema-bound subclasses of this interface may execute more
        elaborate rules on what data is copied and how it is normalized.

        Pre-condition:

          * All new (added) entries updated this way must contain a record_uid
            field with a string UUID.

        Post-condition:

          * New items should always be handled through self.create() and then
            self.add().

          * Method returns modified record.

          * Should notify at least zope.lifecycleevent.IObjectModifiedEvent,
            (if changes, detection of which is left up to implementation).

          * On creation of new records, should notify both
            IObjectCreatedEvent and IObjectAddedEvent (the record container
            is the context of record).

        """
        if self.RECORD_INTERFACE.providedBy(data):
            # record object passed: reduce it to a whitelisted mapping
            uid = data.record_uid
            data = self._filtered_data(data)
        else:
            uid = data.get('record_uid', None)
        if uid is None:
            raise ValueError('empty record UID on update')
        uid = str(uid)
        record = self.get(uid, None)
        if record is not None:
            # existing record, already known/saved
            self._before_populate(record, data)
            self._populate_record(record, data)  # also notifies modified event
        else:
            # new, create, then add
            record = self.create(data)  # notifies created, modified for record
            self.add(record)            # notified added event
        self._before_update_notification(record, data)
        # containerwide notification, only when the record was actually
        # dirtied and the caller did not suppress it (e.g. update_all)
        if (not suppress_notify) and getattr(record, '_p_changed', None):
            self.notify_data_changed()
        return record

    def _process_container_metadata(self, data):
        """Subclass hook for wrapper metadata; reports no modification."""
        return False

    def update_all(self, data):
        """
        Given sequence of data dictionaries or a JSON serialization
        thereof, update each item.  Raises ValueError on missing UID of
        any item/entry.  Also supports JSON serialization of a single
        record/entry dict.  Entries absent from data are removed, the
        container order is replaced by the order of data, and a single
        containerwide modification event is notified when anything
        changed.
        """
        _modified = False
        if isinstance(data, basestring):
            _data = json.loads(data)
            if isinstance(_data, dict):
                # dict might be singular item, or wrapping object; a wrapping
                # object would have a list called 'entries'
                if 'entries' in _data and isinstance(_data['entries'], list):
                    _modified = self._process_container_metadata(_data)
                    # wrapper, get entries from within.
                    _data = _data['entries']
                else:
                    # singular record, not a wrapper
                    _data = [_data]  # wrap singular item update in list
            # JSON object keys may be unicode; normalize keys to str
            _keynorm = lambda o: dict([(str(k), v) for k, v in o.items()])
            data = [_keynorm(o) for o in _data]
        # fix: validate UIDs up front -- previously o['record_uid'] below
        # raised KeyError before the intended ValueError could be reached
        for entry_data in data:
            if 'record_uid' not in entry_data:
                raise ValueError('record missing UID')
        uids = [str(o['record_uid']) for o in data]
        existing_uids = set(self.keys())
        added_uids = set(uids) - existing_uids
        modified_uids = set(uids).intersection(existing_uids)
        for entry_data in data:
            record = self.update(entry_data, suppress_notify=True)
            if not _modified and getattr(record, '_p_changed', None):
                _modified = True
        remove_uids = existing_uids - set(uids)
        for deluid in remove_uids:
            del(self[deluid])  # remove any previous entries not in the form
        self._order = PersistentList(uids)  # replace old with new uid order
        if added_uids or modified_uids:
            _modified = True
        if data and _modified:
            self.notify_data_changed()  # notify just once
Esempio n. 18
0
class PSetList(Persistent):
    """PSetList is a persistent set list object that mostly acts just like a normal Python list for PSet objects.
    These lists can be saved in the database just like any other persistent objects. It can optionally be initialized
    with another list of PSet objects and a name. Additionally, it will also have an attribute 'creation_date' and
    a unique uuid attribute 'id'. PSetLists are considered equal if they have the same 'id'.

    Except for the usual list methods like 'extend' and 'append', the PSetList is functional in style, meaning that
    calling any of the other filtering or querying methods return new PSetList objects leaving the original untouched.

    args:
        sets (PSetList, PersistentList[PSet], list[PSet], tuple[PSet]): Initial sets of the list.
        name (str): Name of the set list.
    """

    def __init__(self, sets=None, name=''):
        if isinstance(sets, PSetList):
            self._sets = PersistentList(sets.sets)
        elif isinstance(sets, (PersistentList, list, tuple)):
            self._sets = PersistentList(sets)
        elif not sets:
            self._sets = PersistentList()
        else:
            raise TypeError

        self.name = name
        self.creation_date = datetime.datetime.now()
        self.id = uuid.uuid4()

    def __getitem__(self, item):
        # int index returns a single PSet; a slice returns a new PSetList
        if isinstance(item, int):
            return self._sets.__getitem__(item)
        else:
            return PSetList(self._sets.__getitem__(item))

    def __setitem__(self, key, value):
        self._sets.__setitem__(key, value)

    def __iter__(self):
        return iter(self._sets)

    def __str__(self):
        return str(self._sets)

    def __repr__(self):
        return repr(self._sets)

    def _combined(self, other):
        """Return a new PSetList of this list's sets followed by other.

        Accepts another PSetList, a sequence of sets, or a single PSet;
        raises TypeError for anything else.
        """
        if isinstance(other, PSetList):
            return PSetList(self.sets + other.sets)
        elif isinstance(other, (PersistentList, list, tuple)):
            return PSetList(self.sets + other)
        elif isinstance(other, PSet):
            new_sets = PersistentList(self.sets)
            new_sets.append(other)
            return PSetList(new_sets)
        else:
            raise TypeError

    def __add__(self, other):
        return self._combined(other)

    def __radd__(self, other):
        # NOTE: mirrors the original behavior -- this list's sets come
        # first regardless of operand order
        return self._combined(other)

    def __iadd__(self, other):
        # NOTE: returns a new PSetList (rebinding the augmented name)
        # rather than mutating in place, matching original behavior
        return self._combined(other)

    def __len__(self):
        return len(self.sets)

    def __contains__(self, pset):
        return self.sets.__contains__(pset)

    def __eq__(self, other):
        if isinstance(other, PSetList):
            return self.id == other.id
        # fix: signal "not comparable" instead of silently returning None
        return NotImplemented

    def __hash__(self):
        # fix: keep instances hashable under Python 3 despite the custom
        # __eq__; hash agrees with the id-based equality above
        return hash(self.id)

    def append(self, pset):
        """Appends the given set object to this list in-place.

        Args:
            pset (PSet): The set object to append.
        """
        self.sets.append(pset)

    def extend(self, psets):
        """Extends the list with a list of set objects in-place.

        Args:
            psets (PSetList, list, tuple, PersistentList): A PSetList, PersistentList, list or a tuple of
                set objects to extend this list with.
        """
        if isinstance(psets, PSetList):
            self.sets.extend(psets.sets)
        elif isinstance(psets, (PersistentList, list, tuple)):
            self.sets.extend(psets)
        else:
            raise TypeError

    def insert(self, index, pset):
        """Inserts a set object to a given index in this list in-place.

        Args:
            pset (PSet): The set object to be inserted in the given index in this list.
            index (int): The index to insert the given set object.
        """
        self._sets.insert(index, pset)

    def index(self, pset):
        """Returns the index where the given set object is located in this list.

        Args:
            pset (PSet): The set object to be searched.

        Returns:
            int: The index of the given set object in this list.
        """
        # fix: the result was computed but never returned
        return self._sets.index(pset)

    def clear(self):
        """Clears this list."""
        self._sets.clear()

    def remove(self, pset):
        """Removes a given set from this list in-place.

        Args:
            pset (PSet): A set object to remove from this list.
        """
        self._sets.remove(pset)

    def pop(self, index):
        """Removes and returns the set at a given index from this list in-place.

        Args:
            index (int): An index to remove a set from.

        Returns:
            PSet: The removed set object.
        """
        # fix: return the popped element, like list.pop
        return self._sets.pop(index)

    def count(self, pset):
        """Returns the number of given set objects in this list. Sets are considered same if they have the same code.

        Args:
            pset (pset): A set object to count.

        Returns:
            int: The number of given set objects in this list
        """
        return self._sets.count(pset)

    def sort(self, func):
        """Sorts the sets of this list with a given function in-place. The given function should return some
        attribute of a set object by which this list is sorted.

        Args:
            func: A function to sort this list with.
        """
        self._sets.sort(key=func)

    def filter(self, func):
        """Filters the sets of this list with a given function in-place. The list retains all the sets
        for which the given function returns True.

        Args:
            func: A function to filter with.
        """
        # fix: PersistentList has no filter() method (the old call always
        # raised AttributeError); filter in place via slice assignment
        self._sets[:] = [pset for pset in self._sets if func(pset)]

    def sorted(self, func):
        """Returns a new list with the sets of this list sorted with a given function. The given function should return
        some attribute of a set object by which this list is sorted.

        Args:
            func: A function to sort this list with.

        Returns:
            PSetList: A new instance of this list sorted.

        """
        return PSetList(sorted(self.sets, key=func))

    def filtered(self, func):
        """Returns a new list filtered with a given function. The new list contains all the sets
        for which the given function returns True.

        Args:
            func: A function to filter with.

        Returns:
            PSetList: A new instance of this list filtered.

        """
        return PSetList(list(filter(func, self.sets)))

    def _prune_kwargs(self, kwargs):
        """Remove unusable keywords (empty values, unknown attributes)
        from kwargs in place, warning about each; also warns when this
        list itself is empty."""
        del_keys = []

        for (key, val) in kwargs.items():
            if not val:
                msg = 'Ignoring an empty or null value for keyword {}. Null or empty values are not supported.'
                warnings.warn(msg.format(key))
                del_keys.append(key)
            elif len(self.sets) == 0:
                msg = 'Searching an empty list.'
                warnings.warn(msg)
            elif not hasattr(self.sets[0], key):
                msg = 'Ignoring an unrecognized keyword {}. Make sure you are using correct api type and spelling.'
                warnings.warn(msg.format(key))
                del_keys.append(key)

        for key in del_keys:
            del kwargs[key]

    def where(self, invert=False, **kwargs):
        """Returns a new list of sets for which any of the given keyword arguments match partly or completely with the
        attributes of the sets in this list. The arguments should be any set attribute names such as 'name',
        'type' and 'block'. String attributes are case insensitive and it is enough that the argument is a
        substring. For list arguments the order does not matter and it is enough for one of the elements to match.

        The search can also be inverted by setting invert=True so that all the cards NOT matching will be returned.

        Note that searching for Null arguments is not supported.

        Args:
            invert: If True, a list of sets NOT matching the arguments is returned.
            **kwargs: Arguments to match with the attributes of this list's sets.

        Returns:
            bool: A new list of sets for which any of the given keyword arguments match partly or completely.
        """
        self._prune_kwargs(kwargs)

        if not invert:
            return PSetList([pset for pset in self if pset.matches_any(**kwargs)])
        else:
            return PSetList([pset for pset in self if not pset.matches_any(**kwargs)])

    def where_exactly(self, invert=False, **kwargs):
        """Returns a new list of sets for which all of the given keyword arguments match completely with the attributes
        of the sets in this list. The arguments should be any set attribute names such as 'name', 'type' and 'block'.
        String attributes are case insensitive and must match exactly. For list arguments the order does not
        matter and each element must match exactly.

        The search can also be inverted by setting invert=True so that all the cards NOT matching will be returned.

        Note that searching for Null arguments is not supported.

        Args:
            invert: If True, a list of sets NOT matching the arguments is returned.
            **kwargs: Arguments to match with the attributes of this list's cards.

        Returns:
            bool: A new list of sets for which all of the given keyword arguments match completely.
        """
        self._prune_kwargs(kwargs)

        if not invert:
            return PSetList([pset for pset in self if pset.matches_all(**kwargs)])
        else:
            return PSetList([pset for pset in self if not pset.matches_all(**kwargs)])

    def pprint(self):
        """Prints out the contents of this list in a nice readable way."""
        print(self.pprint_str())

    def pprint_str(self):
        """Returns a nice readable string of the contents of this list.

        Returns:
            str: a string of the contents of this list in a nice readable format.
        """

        if len(self) == 0:
            if self.name:
                return 'Empty set list "{}" created at {}\n'.format(self.name, str(self.creation_date))
            else:
                return 'Unnamed empty set list created at {}\n'.format(self.creation_date)

        pp_str = ''

        if self.name:
            pp_str += 'Set list "{}" created at {}\n'.format(self.name, str(self.creation_date))
        else:
            pp_str += 'Unnamed set list created at {}\n'.format(self.creation_date)

        # column widths sized to the longest value in each column
        longest_name = max(len(pset.name) for pset in self.sets)
        longest_type = max(len(getattr(pset, 'set_type', getattr(pset, 'type', ''))) for pset in self.sets)
        longest_block = max(len(pset.block) if pset.block else 0 for pset in self.sets)
        longest_code = max(len(pset.code) if pset.code else 0 for pset in self.sets)

        pp_str += '-' * (longest_name + longest_type + longest_block + longest_code + 17)
        pp_str += '\n'

        format_str = '{name:{w1}s}   {code:{w2}s}   {block:{w3}s}   {type:{w4}s}   {cards}\n'
        pp_str += format_str.format(name='Set',
                                    w1=longest_name,
                                    code='Code',
                                    w2=longest_code,
                                    block='Block',
                                    w3=longest_block,
                                    type='Type',
                                    w4=longest_type,
                                    cards='Cards')
        pp_str += '-' * (longest_name + longest_type + longest_block + longest_code + 17)
        pp_str += '\n'

        for pset in self.sets:
            format_str = '{name:{w1}s}   {code:{w2}s}   {block:{w3}s}   {type:{w4}s}   {cards}\n'
            pp_str += format_str.format(name=pset.name,
                                        w1=longest_name,
                                        code=pset.code,
                                        w2=longest_code,
                                        block=pset.block if pset.block else '',
                                        w3=longest_block,
                                        type=getattr(pset, 'set_type', getattr(pset, 'type', '')),
                                        w4=longest_type,
                                        cards=len(pset))

        return pp_str

    @property
    def api_type(self):
        # api type of the member sets; 'unspecified' when the list is empty
        try:
            return self.sets[0].api_type
        except IndexError:
            return 'unspecified'

    @property
    def json(self):
        # serialize member sets (with their cards) sans internal attrs
        pset_json_dicts = []

        for pset in self.sets:
            json_dict = dict(pset.__dict__)
            del json_dict['_cards']
            del json_dict['_sideboard']
            del json_dict['creation_date']
            del json_dict['id']

            if len(pset) > 0:
                json_dict['cards'] = [card.__dict__ for card in pset.cards]
                pset_json_dicts.append(json_dict)

        return json.dumps({'sets': pset_json_dicts}, sort_keys=True, indent=4)

    @property
    def sets(self):
        return self._sets

    @sets.setter
    def sets(self, sets):
        if isinstance(sets, PSetList):
            self._sets = PersistentList(sets.sets)
        elif isinstance(sets, (list, PersistentList, tuple)):
            self._sets = PersistentList(sets)
        elif not sets:
            self._sets = PersistentList()
        else:
            raise TypeError
Esempio n. 19
0
class TicketTracker(QonPersistent, Watchable):
    """A ticket/issue tracking system.

    Tickets live in an ordered list; a ticket's id is its index in that
    list.  Tracks a last-modified timestamp and notifies watchers on
    every change.
    """

    persistenceVersion = 2

    def __init__(self, name='', group=None):
        Watchable.__init__(self)
        self.group = group
        self.name = name
        self.modified = never  # last-change timestamp; 'never' until first change
        self.__items = PersistentList()       # all tickets; ticket id == index
        self.__categories = PersistentList()  # category names for this tracker

    def upgradeToVersion1(self):
        """Persistence migration: re-encode legacy name as UTF-8."""
        self.name = iso_8859_to_utf_8(self.name)

    def add_category(self, category):
        """Add category if not already present."""
        if category not in self.__categories:
            self.__categories.append(category)

    def remove_category(self, category):
        """Remove category if present."""
        if category in self.__categories:
            self.__categories.remove(category)

    def get_categories(self, sorted=0):
        """Return a copy of the category list, alphabetized when sorted
        is true.  (The 'sorted' parameter name shadows the builtin; kept
        for API compatibility.)"""
        cats = self.__categories[:]
        if sorted:
            cats.sort()
        return cats

    def add_ticket(self, ticket):
        """Append ticket, stamp modified time, and notify watchers."""
        self.__items.append(ticket)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def new_ticket(self, user, title='', category="feature", priority = 3, text=''):
        """Create a new ticket and return it."""
        ticket = Ticket(user, title, category, priority, text)
        self.add_ticket(ticket)
        user.karma_activity_credit()
        return ticket

    def get_ticket(self, id):
        """Return a ticket with the given id (index)."""
        return self.__items[id]

    def get_index(self, ticket):
        """Return index of ticket. Uses cached value if available."""
        if hasattr(ticket, '_v_index'):
            return ticket._v_index
        else:
            return self.__items.index(ticket)

    def new_tickets(self):
        """Return new tickets."""
        return self._tickets_by_state(['new'])

    def open_tickets(self):
        """Return open tickets."""
        return self._tickets_by_state(['open'])

    def closed_tickets(self):
        """Return closed tickets."""
        return self._tickets_by_state(['closed'])

    def active_tickets(self):
        """Return tickets not new or closed."""
        return self._tickets_by_state(['open', 'assigned', 'feedback'])

    def owned_tickets(self, user, only_open=0):
        """Return tickets owned (submitted) by user."""
        items = []
        for i, t in enumerate(self.__items):
            if t.user is user and (not only_open or not t.is_closed()):
                t._v_index = i  # cache index for get_index()
                items.append(t)
        return items

    def assigned_tickets(self, user, only_open=0):
        """Return tickets assigned to user, regardless of state."""
        items = []
        for i, t in enumerate(self.__items):
            if t.assignee is user and (not only_open or not t.is_closed()):
                t._v_index = i
                items.append(t)
        return items

    def feedback_tickets(self, user, only_open=0):
        """Return tickets awaiting feedback from user."""
        items = []
        for i, t in enumerate(self.__items):
            if t.is_feedback() and (t.user is user) and (not only_open or not t.is_closed()):
                t._v_index = i
                items.append(t)
        return items

    def only_open(self, tickets):
        """Given a list of tickets, eliminate closed tickets."""
        return [t for t in tickets if not t.is_closed()]

    def sort_by_modified(self, tickets):
        """Given a list of tickets, return sorted newest to oldest by modified."""
        bydate = [(t.modified or t.date, t) for t in tickets]
        # fix: sort on the date key only; sorting the (date, ticket)
        # tuples directly falls back to comparing Ticket objects when
        # dates are equal
        bydate.sort(key=lambda pair: pair[0], reverse=True)
        return [t for date, t in bydate]

    def last_modified(self):
        """Compute and cache last_modified from tickets."""
        # NOTE(review): recompute happens only while self.modified is
        # falsy -- confirm the 'never' sentinel is indeed falsy
        if self.modified:
            return self.modified

        latest = never
        for t in self.__items:
            if t.modified > latest:
                latest = t.modified

        self.modified = latest
        return self.modified

    def watchable_name(self):
        return self.name

    def watchable_modified_date(self):
        return self.last_modified()

    # ticket methods

    def add_comment(self, ticket, user, category, priority, text):
        """Add a comment to ticket; stamp modified time, notify watchers."""
        ticket.add_comment(user, category, priority, text)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def change_status(self, ticket, user, status, category, priority, text):
        """Change ticket status; stamp modified time, notify watchers."""
        ticket.change_status(user, status, category, priority, text)
        self.modified = datetime.utcnow()
        self.watchable_changed(self.modified)

    def _tickets_by_state(self, state):
        """Return tickets whose status is in the state list, caching
        each ticket's index in its volatile _v_index attribute."""
        items = []
        for i, t in enumerate(self.__items):
            if t.status in state:
                t._v_index = i
                items.append(t)
        return items
Esempio n. 20
0
class PollContainer(QonPersistent, Watchable):
    """Maintains list of Poll items for a Group."""

    _karma_new_item = 1     # cost to create a new poll

    def __init__(self, ihb):
        """Create a Polls belonging to an IHasBlog."""
        Watchable.__init__(self)
        self.ihb = ihb
        self.__polls = PersistentList()

    def can_pay_for_new_item(self, user):
        """True when user exists and can afford the new-poll karma cost."""
        if not user:
            return False
        return user.can_give_karma(self._karma_new_item)

    def add_poll(self, poll):
        """Add new Poll. Returns None if poll was not added."""

        if not _ROLLOUT_TEST:
            # charge creator for item - don't create if can't pay
            try:
                poll.creator.pay_karma(self._karma_new_item)
            except NoKarmaToGive:
                return None

        # karma credit (fix: was credited a second time after
        # watchable_changed below, doubling the activity credit)
        poll.creator.karma_activity_credit()

        self.__polls.append(poll)

        # set Poll's refs back to me
        poll.container = self
        poll._set_item_index(self.__polls.index(poll))

        self.watchable_changed(poll.date)
        return poll

    def get_poll(self, index):
        """Return the poll at index, or None when out of range."""
        try:
            return self.__polls[index]
        except IndexError:
            return None

    def active_polls(self):
        """Return list of active polls [(end_date, poll), ...], sorted by most-recent end date."""
        now = datetime.utcnow()
        bydate = [(p.end_date, p) for p in self.__polls if p.is_active(now)]
        # sort on end_date only; comparing (date, poll) tuples directly
        # would fall back to comparing Poll objects on ties
        bydate.sort(key=lambda pair: pair[0])
        return bydate

    def completed_polls(self):
        """Return list of completed polls [(end_date, poll), ...] sorted by most recent end date."""
        now = datetime.utcnow()
        bydate = [(p.end_date, p) for p in self.__polls if not p.is_active(now)]
        bydate.sort(key=lambda pair: pair[0], reverse=True)
        return bydate

    def get_polls(self):
        return self.__polls

    def watchable_name(self):
        return self.ihb.name + ' Polls'

    def watchable_changed(self, now=None):
        # tells group he has changed, too
        Watchable.watchable_changed(self, now)
        self.ihb.watchable_changed(now)
0
class WikiPage(QonPersistent, Watchable, qon.karma.HasKarma, IHasBlog):
    """A wiki page: an append-only list of WikiVersion revisions plus an
    attached Blog whose item 0 holds the page's comments.

    Maintains two cached HTML renderings (with and without tooltips) and
    inbound/outbound cross-page reference lists, and participates in the
    watch and karma machinery.
    """

    # schema version for the upgradeToVersionN migration machinery
    persistenceVersion = 4

    def __init__(self, wiki, name=''):
        """Create a page in *wiki*, normalizing *name* and seeding revision 0."""
        Watchable.__init__(self)
        qon.karma.HasKarma.__init__(self)
        self.wiki = wiki
        self.outbound_references = None
        self.inbound_references = None
        self.name = clean_page_name(name)
        self.versions = PersistentList()
        self.blog = Blog(self)
        self.locked_by_user = None
        self.__cached_html = PersistentCache(self._update_html_cache)
        self.__cached_html2 = PersistentCache(self._update_html2_cache)
        self.new_revision(force_new=1)

    def upgradeToVersion4(self):
        # schema migration: add inbound reference tracking
        self.inbound_references = None
        self.version_upgrade_done()

    def upgradeToVersion3(self):
        # schema migration: add the tooltip-suppressed HTML cache
        self.__cached_html2 = PersistentCache(self._update_html2_cache)
        self.version_upgrade_done()

    def upgradeToVersion2(self):
        # schema migration: add the primary HTML cache
        self.__cached_html = PersistentCache(self._update_html_cache)
        self.version_upgrade_done()

    def upgradeToVersion1(self):
        # schema migration: point the comment blog back at this page
        self.blog.ihb = self
        self.version_upgrade_done()

    def __repr__(self):
        return '<%s object at 0x%x: %s>' % (self.__module__ + '.' + self.__class__.__name__,
            id(self), self.name or "*no name*")

    def new_revision(self, set_date=True, author=None, title='', raw='', force_new=0):
        """Create a new revision for this page.
        
        Check to make sure that the new text is actually different
        from the latest revision.  If it's not, then don't bother creating a
        new revision."""
        if force_new or (self.versions[-1].get_raw() != raw):
            w = WikiVersion(page=self, author=author, title=title, raw=raw)
            if set_date:
                w.set_date(datetime.utcnow())
            self.versions.append(w)
            
            self.watchable_changed(w.date)
            if author:
                author.karma_activity_credit()
                
            # before invalidating referring pages, we want to 
            # update the html cache, which has the side effect
            # of updating the outbound references.
            self.invalidate_html_cache()
            unused_html = self.get_cached_html()

            # may seem useless for new pages, but we could be creating
            # a new page that was referred to from another page somewhere
            self._invalidate_referring_pages()
            
            self._p_changed = 1
            
    def _invalidate_referring_pages(self, all_groups=0):
        """Invalidate HTML cache of pages which refer to this one."""

        # we changed default behavior to not scan all groups when invalidating.
        # this means that cross-group links for new pages after this change
        # will not be accurate, until the page(s) linking to the new page
        # is itself modified.

        refs = self.wiki.references_to(self, all_groups=all_groups)
        for p in refs:
            p.invalidate_html_cache()
        
    def latest_edit_by(self, user):
        """Return latest edit by user, or None."""
        # walk revisions newest-first; identity comparison against user
        rvers = self.versions[:]
        rvers.reverse()
        for version in rvers:
            if version.author is user:
                return version
        return None
        
    def get_comments(self):
        """Return list of comments (BlogItems)."""
        # page comments live on blog item 0 (see can_delete_item)
        blog_item = self.blog.get_item(0)
        if blog_item:
            return blog_item.get_comments()
        else:
            return []
        
    def get_revision(self, rev_id):
        """Return revision index rev_id or None."""
        # clamp negatives so a negative id can't index from the end
        rev_id = max(0, rev_id)
        try:
            rev = self.versions[rev_id]
        except IndexError:
            rev = None
        return rev

    def revision_index(self, version):
        """Return revision index of version, or raise ValueError."""
        return self.versions.index(version)

    def merge_revisions(self, base, old, new):
        """Merge the newest revision with older revision, off of base. Returns (merged text, exit_code) or None.
        
        Base may be -1 to signify empty text.
        Exit code is 0 for no conflicts, or 1 if conflicts exist.
        """
        if len(self.versions) < 2:
            return None

        if base == -1:
            base_text = ''
        else:
            base_text = self.versions[base].get_raw()

        old_text = self.versions[old].get_raw()
        new_text = self.versions[new].get_raw()

        merger = Merger(base_text, old_text, new_text)
        merged = merger.merge('Revision %d' % base,
            'Revision %d' % old,
            'Revision %d' % new,
            )

        if not merged:
            return None

        exit_code = 0
        if merger.has_conflicts():
            exit_code = 1

        return (merged, exit_code)


    def watchable_name(self):
        """Display name for watch notifications: latest title, else page name."""
        #return self.wiki.group.name + ' ' + self.versions[-1].title
        return self.versions[-1].title or self.name
        
    def watchable_changed(self, now=None):
        """Record a change on self and propagate it to the owning wiki."""
        # wiki changed, too
        Watchable.watchable_changed(self, now)
        self.wiki.watchable_changed(now)

    def watchable_modified_date(self):
        return self.watchable_last_change()
        
    def last_modified(self):
        # deprecated shim kept for old callers; use watchable_last_change()
        sys.stderr.write('WARNING: using deprecated qon.wiki.WikiPage.last_modified.')
        return self.watchable_last_change()

    def who_has_lock(self):
        # user holding the edit lock, or None if unlocked
        return self.locked_by_user

    def can_edit(self, user):
        """ A page is editable if either it's not locked by anybody,
        or if the requesting user is the one who holds the lock, or
        if the user is allowed to edit within the group"""
        
        # user must be logged in to edit
        if not user:
            return False
            
        # check lock
        if (self.locked_by_user) and (self.locked_by_user is not user) and (not self.can_manage(user)):
            return False
            
        if self.wiki.group.can_edit(user):
            return True
        
        return False
        
    def can_show(self):
        """Return False if this item should be suppressed due to feedback score."""
        if self.get_karma_score() < qon.karma.min_karma_to_show:
            return False
        return True
        

    def can_lock(self, user):
        """ For now, let only a group owner lock/unlock a page.
        In the future, we may want to consider allowing the original
        page author to lock/unlock as well."""
        return self.wiki.group.is_owner(user)

    def lock(self, user):
        # no-op unless user is allowed to lock (see can_lock)
        if self.can_lock(user):
            self.locked_by_user = user

    def unlock(self, user):
        # no-op unless user is allowed to unlock (see can_lock)
        if self.can_lock(user):
            self.locked_by_user = None
            
    def can_get_karma_from(self, other):
        # any logged-in user may give this page karma
        return other is not None
        
    # HTML cache methods
    
    def add_html_dependency(self, target):
        """Adds target as something self depends on for its HTML cache."""
        self.__cached_html.add_dependency(target)
        self.__cached_html2.add_dependency(target)
        
    def invalidate_html_cache(self):
        # flush both renderings; they are regenerated lazily on next get
        self.__cached_html.flush()
        self.__cached_html2.flush()
        
    def get_cached_html(self):
        # rendered HTML with tooltips; fills cache on demand
        return self.__cached_html.get()

    def get_cached_html2(self):
        # rendered HTML with tooltips suppressed; fills cache on demand
        return self.__cached_html2.get()

    def _update_html_cache(self):
        # cache-fill callback for __cached_html
        v = self.versions[-1]
        html = v.raw_to_html(v.get_raw())
        
        # take this opportunity to update the page's outbound references
        if hasattr(v, '_v_references'):
            self.set_outbound_references(v._v_references)
            del v._v_references
            
        return html

    def _update_html2_cache(self):
        # cache-fill callback for __cached_html2 (tooltips suppressed)
        v = self.versions[-1]
        html = v.raw_to_html(v.get_raw(), suppress_tooltip=1)
        
        return html

    def disable_cache(self):
        self.__cached_html.disable_cache()
        self.__cached_html2.disable_cache()

    def cache_disabled(self):
        return self.__cached_html.cache_disabled() or self.__cached_html2.cache_disabled()
  
    def get_ref(self):
        """Return a reference (group, page_name) to this page, for use in
        outbound/inbound references."""
        return (self.wiki.group, self.name)
            
    def set_outbound_references(self, new_out_refs):
        """Record new outbound references."""

        # filter non-existent cross-group page refs out of new_out_refs
        # this interacts with the change that no longer scans all groups
        # for references to new pages. if a cross-group link existed to a new
        # page, this method (pre-filtering) would have neglected to add the inbound
        # link from the cross-group reference, even if both pages had been edited.
        l = []
        for r in new_out_refs:
            group_name, page_name = r
            if not group_name:
                l.append(r)
            else:
                page = _ref_to_page(r, self.wiki.group)
                if page:
                    l.append(r)
        new_out_refs = l

        # get old outbound refs
        old_out_refs = self.outbound_references or []

        # get two lists: items that used to be outbound references but
        # are no longer (old_not_new), and new outbound references that
        # weren't there before (new_not_old)
        old_not_new, new_not_old = xor_lists(old_out_refs, new_out_refs)

        # pre-fill reference to me
        me_ref = self.get_ref()

        # invalidate inbound references of pages that we no longer refer to
        for ref in old_not_new:
            page = _ref_to_page(ref, self.wiki.group)
            if page:    # added by Alex
                page.remove_inbound_reference(me_ref)

        # add inbound references for pages we've added outbound links to
        for ref in new_not_old:
            page = _ref_to_page(ref, self.wiki.group)
            if page:    # could be ref to new page
                page.add_inbound_reference(me_ref)

        # record new outbound references
        self.outbound_references = PersistentList()
        self.outbound_references.extend(new_out_refs)

    def remove_inbound_reference(self, ref):
        # no-op until upgradeToVersion4 has initialized inbound_references
        if self.inbound_references is not None:
            if ref in self.inbound_references:
                self.inbound_references.remove(ref)

    def add_inbound_reference(self, ref):
        # no-op until upgradeToVersion4 has initialized inbound_references
        if self.inbound_references is not None:
            if ref not in self.inbound_references:
                self.inbound_references.append(ref)

    # IHasBlog methods not implemented by other base classes
    
    def can_manage(self, user):
        """Who can manage this blog? Group owners."""
        return self.wiki.group.is_owner(user)
        
    def can_read(self, user):
        return self.wiki.can_read(user)
        
    def can_delete_item(self, item):
        """Can't delete item 0, which holds page comments."""
        if self.blog.get_item(0) is item:
            return False
        return True
        
    def can_create_item(self):
        """Users aren't allowed to create new topics in wiki pages."""
        return False
    
    def is_accepted(self):
        return self.wiki.group.is_accepted()
    
    def get_owners(self):
        return self.wiki.group.get_owners()
    
    def is_owner(self, user):
        return self.wiki.group.is_owner(user)
        
    def get_title(self):
        # this is here and in BlogItem
        return self.versions[-1].title or self.name
    
    def get_blog(self):
        return self.blog
    
    def get_wiki(self):
        return self.wiki
    
    def get_name(self):
        return self.name
    
    def get_all_owners(self):
        return self.get_owners()
    
    def get_all_blogs(self):
        return [self.blog]
        
    def get_member_list(self):
        return self.wiki.group.get_member_list()
Esempio n. 22
0
class Blog(QonPersistent, Watchable):
    """Contains a blog.

    An ordered collection of BlogItems owned by an IHasBlog (group,
    user, or wiki page), with karma charging for new items and decay
    of inactive ones.
    """
    # schema version for the upgradeToVersionN migration machinery
    persistenceVersion = 4
    
    # karma price an author pays to post a new item
    _karma_new_item             = 1
    _inactive_period            = timedelta(days=7) # period after which item is considered inactive
    _inactive_karma_discount    = 1                 # daily karma score decay of inactive items
    
    def __init__(self, ihb):
        """Create a blog belonging to IHasBlog."""
        Watchable.__init__(self)
        self.ihb = ihb
        self.__items = PersistentList()
        self.__main_item = None

    def upgradeToVersion4(self):
        # schema migration: add the optional "main item" slot
        self.__main_item = None
        self.version_upgrade_done()
        
    def upgradeToVersion3(self):
        """Schema migration: fold per-blog ReaderList data into each
        BlogItem's private per-user access map, then drop the list."""
        # move all data from ReaderList into BlogItems
        from qon.base import get_user_database
        user_db = get_user_database()
        
        for user_id, reader in self.__reader_list.readers.iteritems():
            user = user_db.get_user(user_id)
            if not user:
                # skip readers whose user record no longer exists
                continue
            user_oid = unpack_oid(user._p_oid)
            
            for item_id, data in reader.read_items.iteritems():
                dt, count = data
                item = self.get_item(item_id)
                
                # explicit name-mangled access into BlogItem's private map
                item._BlogItem__user_access[user_oid] = (dt, count)
    
        del self.__reader_list
        self.version_upgrade_done()
        
    def upgradeToVersion2(self):
        # schema migration: introduce ReaderList (removed again in v3)
        self.__reader_list = ReaderList()
        self.version_upgrade_done()
        
    def upgradeToVersion1(self):
        # schema migration: rename owner attribute group -> ihb
        self.ihb = self.group
        del self.group
        self.version_upgrade_done()
        
    def can_pay_for_new_item(self, user):
        """Return whether *user* can afford the karma price of a new item.

        FIX: guard against a missing (anonymous) user, matching the
        sibling implementation elsewhere in this module; previously this
        raised AttributeError when user was None.
        """
        if not user:
            return False
        return user.can_give_karma(self._karma_new_item)
        
    def new_item(self, author, title, summary, main='', no_mod=1, no_pay=0):
        """Create and return a new blog item, or None if author can't pay.

        If no_mod is true, skip assigning default negative karma.
        If no_pay is true, or the author posts to his own blog, skip
        charging the new-item karma price.
        """
        # charge author for new item - don't create item if can't pay
        # no charge for posting to author's blog
        if (not no_pay) and (self is not author.blog):
            try:
                author.pay_karma(self._karma_new_item)
            except qon.karma.NoKarmaToGive:
                return None
        
        # create the item
        item = BlogItem(blog=self,
            author=author,
            title=title,
            summary=summary,
            main=main)
        if not no_mod:
            self.assign_new_karma(item)
        
        # add the item
        self.add_item(item)
        
        # assign activity credit
        author.karma_activity_credit()
        
        # pretend the author has read the item he just created
        item.read_item(author)
        
        # notify IHB that we created an item
        self.ihb.notify_new_item(item)
        return item
        
    def add_item(self, item):
        """Append *item* to the blog and record the change time."""
        self.__items.append(item)
        self.watchable_changed(item.date)
    
    def assign_new_karma(self, item):
        """Assign the default starting karma penalty to a new item."""
        # owner items get no negative karma
        if self.ihb.is_owner(item.author):
            return

        # one point of starting penalty per five members, floor of 2
        penalty = max(2, len(self.ihb.get_member_list()) // 5)
        item.add_anon_karma(-penalty)
        
    def decay_inactive_items(self):
        """Call this daily to decay karma of inactive positive-score items.

        Returns the list of items that were decayed.
        """
        cutoff = datetime.utcnow() - self._inactive_period
        decayed = []
        for item in self.__items:
            if item.get_karma_score() <= 0:
                continue
            if item.watchable_last_change() >= cutoff:
                continue
            item.add_anon_karma(-self._inactive_karma_discount)
            decayed.append(item)
        return decayed

    def last_modified(self):
        """Return datetime of last modification."""
        # deprecated shim kept for old callers; use watchable_last_change()
        sys.stderr.write('WARNING: using deprecated qon.blog.Blog.last_modified.')
        return self.watchable_last_change()
        
    def recent_items(self, count=10, consider_comments=True):
        """Return up to *count* most recent items, newest to oldest."""
        dated = self.recent_items_with_date(count=count,
            consider_comments=consider_comments)
        # recent_items_with_date is oldest-first; flip while stripping dates
        return [item for date, item in reversed(dated)]
        
    def recent_items_with_date(self, count=10, consider_comments=True):
        """Return up to *count* most recent items as (date, item) tuples,
        oldest first.

        With consider_comments, the date is the latest of a posted comment
        or an edit (cached until the next watchable_changed). Otherwise
        the date is that of the original posting only.
        """
        if not consider_comments:
            by_post_date = [(item.date, item) for item in self.get_items()]
            by_post_date.sort()
            return by_post_date[-count:]

        # serve from cache when watchable_changed hasn't flushed it
        cached = getattr(self, '_cached_recent_items_with_date', None)
        if cached is not None:
            return cached[-count:]

        by_mod_date = [(item.last_modified(consider_comments=True), item)
                       for item in self.get_items()]
        by_mod_date.sort()

        self._cached_recent_items_with_date = by_mod_date
        return by_mod_date[-count:]
        
    def unread_items(self, user):
        """Return [(item.last_modified(), item), ...] for items user has
        not read (or that changed since the last read). Empty for no user."""
        if not user:
            return []

        unread = []
        for item in self.get_items():
            if item.has_read_item(user, updated=item.last_modified()):
                continue
            unread.append((item.last_modified(), item))
        return unread

    def mark_items_as_read(self, user, as_of=None):
        """Mark every undeleted item as read by *user*.

        If as_of is provided, mark them read as of that datetime;
        otherwise use the current UTC time.
        """
        when = as_of if as_of else datetime.utcnow()
        for item in self.get_items():
            item.read_item(user, when)

    def recent_items_by_author(self, author, count=10, not_by=0):
        """Return up to *count* most recent items by *author*, newest first.

        author may be a single author or a list (coerced to a list).
        If not_by is true, return items NOT authored by any of them instead.

        FIX: removed dead code — a pre-initialized `items` list that was
        immediately overwritten, and a reversed copy of get_items() that
        was never used.
        """
        authors = coerce_to_list(author)

        bydate = [(i.last_modified(), i) for i in self.get_items() \
            if (not not_by and (i.author in authors)) or \
                (not_by and (i.author not in authors))]
        bydate.sort()

        items = [i for date, i in bydate[-count:]]
        items.reverse()
        return items
        
    def recent_comments_by_author(self, author, count=10):
        """Return up to *count* most recent comments in this blog by author,
        newest first, as (comment, parent item) tuples."""
        pairs = []
        for parent in self.get_items():
            for comment in parent.comments_by(author) or []:
                pairs.append((comment, parent))

        dated = [(comment.last_modified(), comment, parent)
                 for comment, parent in pairs]
        dated.sort()

        recent = [(comment, parent) for date, comment, parent in dated[-count:]]
        recent.reverse()
        return recent
        
        
    def highest_score_items(self, count=10):
        """Return up to *count* items with positive karma, highest first."""
        scored = []
        for item in self.get_items():
            karma = item.get_karma_score()
            if karma > 0:
                scored.append((karma, item.date, item))
        scored.sort()

        top = [item for karma, date, item in scored[-count:]]
        top.reverse()
        return top

    def num_items(self):
        """Count undeleted items (avoids building the list get_items() would)."""
        return sum(1 for item in self.__items if not item.is_deleted())

    def num_active_items(self, days=3, consider_comments=True):
        """Return (count, latest_date): how many undeleted items were
        modified (or commented on, if consider_comments) within the last
        *days* days, plus the latest modification date seen overall."""
        cutoff = datetime.utcnow() - timedelta(days=days)
        # counted in one pass rather than via a throwaway filtered list
        active = 0
        latest = never
        for item in self.get_items():
            modified = item.last_modified(consider_comments)
            if modified > cutoff:
                active += 1
            latest = max(latest, modified)
        return (active, latest)

    def num_old_new_comments(self, user):
        """Return (old, new): total counts of old and new comments for
        *user* across all undeleted BlogItems.

        FIX: removed an `item_index` counter that was incremented on every
        iteration but never used (its justifying comment about avoiding
        item_index() lookups was stale — nothing consumed the index).
        """
        old = new = 0
        for i in self.__items:
            if i.is_deleted():
                continue
            last_read = i.last_read(user)
            (i_old, i_new) = i.num_old_new_comments(last_read)
            old += i_old
            new += i_new
        return (old, new)
        
    def has_unread_recent_items(self, user, count):
        """Return True if any of the *count* most recent items is unread by user."""
        return any(
            not item.has_read_item(user, updated=item.last_modified())
            for item in self.recent_items(count=count))
        
    def items_with_reader_counts(self):
        """Return [(num_readers, item), ...] for all undeleted items."""
        # item_views() returns (view_count, num_readers); only the latter is kept
        return [(item.item_views()[1], item) for item in self.get_items()]
    
    def in_items(self, item):
        """Return true if item is in my list of items."""
        # includes deleted items, unlike get_items()
        return item in self.__items
        
    def item_index(self, item):
        """Return item's positional index/id; raises ValueError if absent."""
        return self.__items.index(item)
        
    def get_items(self):
        """Return a new list of all items that are not deleted."""
        return [entry for entry in self.__items if not entry.is_deleted()]

    def get_all_items(self):
        """Return a shallow copy of all items, including deleted ones."""
        return self.__items[:]
        
    def get_item(self, index):
        """Return a blog item by its index/id, or None if out of range."""
        try:
            item = self.__items[index]
        except IndexError:
            item = None
        return item

    def get_main_item(self):
        """A Blog's main item is a primary discussion.
        Returns an instance of BlogItem (or None if unset)."""
        return self.__main_item

    def set_main_item(self, item):
        """Set the blog's primary BlogItem
        A Blog's main item is a primary discussion."""
        # NOTE(review): assert is stripped under python -O; invalid types
        # would then be stored silently.
        assert(isinstance(item, BlogItem) or item is None)
        self.__main_item = item

    def get_item_count(self):
        """Return number of items, including deleted items."""
        return len(self.__items)
        
    def get_limbo_items(self):
        """Return undeleted items whose karma score has gone negative."""
        return [i for i in self.get_items() if i.get_karma_score() < 0]
        
    def notify_karma_changed(self):
        """Notice that karma changed on an item."""
        # forward to the owning IHB; no local state kept
        self.ihb.notify_karma_changed()        

    # Watchable methods
        
    def watchable_name(self):
        """Display name for watch notifications, delegated to the owner."""
        return self.ihb.blog_name()
    
    def watchable_changed(self, now=None):
        """Record a change: flush the recent-items cache, then propagate."""
        # remove cached recent_items
        if hasattr(self, '_cached_recent_items_with_date'):
            del self._cached_recent_items_with_date

        # tells group he has changed, too
        Watchable.watchable_changed(self, now)
        self.ihb.watchable_changed(now)        
        
    def watchable_modified_date(self):
        """Alias for the Watchable last-change timestamp."""
        return self.watchable_last_change()

    def can_read(self, user):
        """Read access is delegated to the owning IHB."""
        return self.ihb.can_read(user)