Ejemplo n.º 1
0
    def initFromStore(self):
        """
        Initialise this object from the store, based on its UID and home
        resource ID. We read in and cache all the extra metadata from the DB to
        avoid having to do DB queries for those individually later.

        @return: L{self} if object exists in the DB, else C{None}
        """
        rows = (yield self._oneNotificationQuery.on(
            self._txn, uid=self._uid, homeID=self._home._resourceID))
        if not rows:
            returnValue(None)

        row = tuple(rows[0])
        self._resourceID = row[0]
        self._md5 = row[1]
        self._size = row[2]
        self._notificationType = row[3]
        self._created = parseSQLTimestamp(row[4])
        self._modified = parseSQLTimestamp(row[5])

        # The type column may hold a JSON-encoded value; decode best-effort
        # and keep the raw value if it is not valid JSON.
        try:
            self._notificationType = json.loads(self._notificationType)
        except ValueError:
            pass
        # Python 2: normalise a decoded unicode value back to UTF-8 bytes.
        if isinstance(self._notificationType, unicode):
            self._notificationType = self._notificationType.encode("utf-8")

        self._loadPropertyStore()
        returnValue(self)
Ejemplo n.º 2
0
    def initFromStore(self):
        """
        Execute necessary SQL queries to retrieve attributes.

        @return: L{self} if this attachment exists in the DB, C{None}
            otherwise.
        """
        att = self._attachmentSchema
        # Dropbox attachments are keyed by (dropbox id, file name); a dropbox
        # id of "." marks a managed attachment, which is looked up by its
        # attachment id instead.
        if self._dropboxID and self._dropboxID != ".":
            where = (att.DROPBOX_ID == self._dropboxID).And(
                att.PATH == self._name)
        else:
            where = (att.ATTACHMENT_ID == self._attachmentID)
        rows = (yield Select(
            self._allColumns(),
            From=att,
            Where=where
        ).on(self._txn))

        if not rows:
            returnValue(None)

        # Copy the raw row values onto this instance, then convert the
        # columns that need richer types than the DB values.
        for attr, value in zip(self._rowAttributes(), rows[0]):
            setattr(self, attr, value)
        self._created = parseSQLTimestamp(self._created)
        self._modified = parseSQLTimestamp(self._modified)
        self._contentType = MimeType.fromString(self._contentType)

        returnValue(self)
Ejemplo n.º 3
0
    def initFromStore(self):
        """
        Initialise this object from the store, based on its UID and home
        resource ID. We read in and cache all the extra metadata from the DB to
        avoid having to do DB queries for those individually later.

        @return: L{self} if object exists in the DB, else C{None}
        """
        rows = (yield self._oneNotificationQuery.on(
            self._txn, uid=self._uid, homeID=self._home._resourceID))
        if rows:
            (self._resourceID,
             self._md5,
             self._size,
             self._notificationType,
             self._created,
             self._modified,) = tuple(rows[0])
            self._created = parseSQLTimestamp(self._created)
            self._modified = parseSQLTimestamp(self._modified)
            # The type column may hold a JSON-encoded value; decode it
            # best-effort and keep the raw value if it is not valid JSON.
            try:
                self._notificationType = json.loads(self._notificationType)
            except ValueError:
                pass
            # Python 2: normalise a decoded unicode value back to UTF-8 bytes
            # so downstream consumers always see a byte string.
            if isinstance(self._notificationType, unicode):
                self._notificationType = self._notificationType.encode("utf-8")
            self._loadPropertyStore()
            returnValue(self)
        else:
            returnValue(None)
Ejemplo n.º 4
0
    def initFromStore(self):
        """
        Execute necessary SQL queries to retrieve attributes.

        @return: L{self} if this attachment exists in the DB, C{None}
            otherwise.
        """
        att = self._attachmentSchema
        # Dropbox attachments are keyed by (dropbox id, file name); a dropbox
        # id of "." marks a managed attachment, which is looked up by its
        # attachment id instead.
        if self._dropboxID and self._dropboxID != ".":
            where = (att.DROPBOX_ID == self._dropboxID).And(
                att.PATH == self._name)
        else:
            where = (att.ATTACHMENT_ID == self._attachmentID)
        rows = (yield Select(self._allColumns(), From=att,
                             Where=where).on(self._txn))

        if not rows:
            returnValue(None)

        # Copy the raw row values onto this instance, then convert the
        # columns that need richer types than the DB values.
        for attr, value in zip(self._rowAttributes(), rows[0]):
            setattr(self, attr, value)
        self._created = parseSQLTimestamp(self._created)
        self._modified = parseSQLTimestamp(self._modified)
        self._contentType = MimeType.fromString(self._contentType)

        returnValue(self)
Ejemplo n.º 5
0
    def makeClass(cls, txn, attachmentData):
        """
        Given the various database rows, build the actual class.

        @param txn: the transaction to associate the new object with
        @param attachmentData: the standard set of attachment columns
        @type attachmentData: C{list}

        @return: the constructed child class
        @rtype: L{Attachment}
        """

        att = cls._attachmentSchema
        # Compute the column list once instead of re-deriving it for every
        # index lookup below.
        columns = cls._allColumns()
        dropbox_id = attachmentData[columns.index(att.DROPBOX_ID)]
        # A dropbox id of "." is the sentinel for a managed attachment.
        c = ManagedAttachment if dropbox_id == "." else DropBoxAttachment
        child = c(
            txn,
            attachmentData[columns.index(att.ATTACHMENT_ID)],
            dropbox_id,
            attachmentData[columns.index(att.PATH)],
        )

        # Copy every column value onto the instance, then convert the columns
        # that need richer types than the raw DB values.
        for attr, value in zip(child._rowAttributes(), attachmentData):
            setattr(child, attr, value)
        child._created = parseSQLTimestamp(child._created)
        child._modified = parseSQLTimestamp(child._modified)
        child._contentType = MimeType.fromString(child._contentType)

        return child
Ejemplo n.º 6
0
    def makeClass(cls, txn, attachmentData):
        """
        Build the concrete attachment object for one row of attachment data.

        @param attachmentData: the standard set of attachment columns
        @type attachmentData: C{list}

        @return: the constructed child class
        @rtype: L{Attachment}
        """

        attSchema = cls._attachmentSchema
        allColumns = cls._allColumns()

        def columnValue(column):
            # Pull a single column's value out of the row data.
            return attachmentData[allColumns.index(column)]

        dropboxID = columnValue(attSchema.DROPBOX_ID)
        factory = ManagedAttachment if dropboxID == "." else DropBoxAttachment
        result = factory(
            txn,
            columnValue(attSchema.ATTACHMENT_ID),
            columnValue(attSchema.DROPBOX_ID),
            columnValue(attSchema.PATH),
        )

        # Apply every column to the instance, then coerce the timestamp and
        # content-type columns into richer types.
        for name, value in zip(result._rowAttributes(), attachmentData):
            setattr(result, name, value)
        result._created = parseSQLTimestamp(result._created)
        result._modified = parseSQLTimestamp(result._modified)
        result._contentType = MimeType.fromString(result._contentType)

        return result
Ejemplo n.º 7
0
    def setData(self, uid, notificationtype, notificationdata, inserting=False):
        """
        Set the object resource data and update any cached metadata.
        """

        notificationtext = json.dumps(notificationdata)
        self._notificationType = notificationtype
        # Cache content digest and size computed from the serialized payload.
        self._md5 = hashlib.md5(notificationtext).hexdigest()
        self._size = len(notificationtext)
        if inserting:
            # Brand new row: capture the server-assigned resource id and the
            # created/modified timestamps returned by the insert.
            rows = yield self._newNotificationQuery.on(
                self._txn, homeID=self._home._resourceID, uid=uid,
                notificationType=json.dumps(self._notificationType),
                notificationData=notificationtext, md5=self._md5
            )
            self._resourceID, self._created, self._modified = (
                rows[0][0],
                parseSQLTimestamp(rows[0][1]),
                parseSQLTimestamp(rows[0][2]),
            )
            self._loadPropertyStore()
        else:
            # Updating an existing row only refreshes the modified timestamp.
            rows = yield self._updateNotificationQuery.on(
                self._txn, homeID=self._home._resourceID, uid=uid,
                notificationType=json.dumps(self._notificationType),
                notificationData=notificationtext, md5=self._md5
            )
            self._modified = parseSQLTimestamp(rows[0][0])
        self._notificationData = notificationdata
Ejemplo n.º 8
0
    def setData(self, uid, notificationtype, notificationdata, inserting=False):
        """
        Set the object resource data and update any cached metadata.
        """

        notificationtext = json.dumps(notificationdata)
        self._notificationType = notificationtype
        self._md5 = hashlib.md5(notificationtext).hexdigest()
        self._size = len(notificationtext)

        # Both the insert and the update take the same set of bind values.
        queryArgs = dict(
            homeID=self._home._resourceID,
            uid=uid,
            notificationType=json.dumps(self._notificationType),
            notificationData=notificationtext,
            md5=self._md5,
        )
        if inserting:
            # Brand new row: capture the server-assigned resource id and the
            # created/modified timestamps returned by the insert.
            rows = yield self._newNotificationQuery.on(self._txn, **queryArgs)
            self._resourceID = rows[0][0]
            self._created = parseSQLTimestamp(rows[0][1])
            self._modified = parseSQLTimestamp(rows[0][2])
            self._loadPropertyStore()
        else:
            # Updating an existing row only refreshes the modified timestamp.
            rows = yield self._updateNotificationQuery.on(self._txn, **queryArgs)
            self._modified = parseSQLTimestamp(rows[0][0])
        self._notificationData = notificationdata
Ejemplo n.º 9
0
    def loadAllObjects(cls, parent):
        """
        Load all child objects and return a list of them. This must create the
        child classes and initialize them using "batched" SQL operations to keep
        this constant wrt the number of children. This is an optimization for
        Depth:1 operations on the collection.
        """

        results = []

        # Load from the main table first
        dataRows = (yield
                    cls._allColumnsByHomeIDQuery.on(parent._txn,
                                                    homeID=parent._resourceID))

        if dataRows:
            # Get property stores for all these child resources (if any found)
            # NOTE(review): propertyStores is only bound when dataRows is
            # truthy; safe because the loop below is then the only consumer,
            # but presumably dataRows is always a list -- confirm.
            propertyStores = (yield PropertyStore.forMultipleResources(
                parent.uid(),
                None,
                None,
                parent._txn,
                schema.NOTIFICATION.RESOURCE_ID,
                schema.NOTIFICATION.NOTIFICATION_HOME_RESOURCE_ID,
                parent._resourceID,
            ))

        # Create the actual objects merging in properties
        for row in dataRows:
            child = cls(parent, None)
            (
                child._resourceID,
                child._uid,
                child._md5,
                child._size,
                child._notificationType,
                child._created,
                child._modified,
            ) = tuple(row)
            child._created = parseSQLTimestamp(child._created)
            child._modified = parseSQLTimestamp(child._modified)
            # Best-effort JSON decode of the notification type column.
            try:
                child._notificationType = json.loads(child._notificationType)
            except ValueError:
                pass
            # Python 2: normalise a decoded unicode value back to UTF-8 bytes.
            if isinstance(child._notificationType, unicode):
                child._notificationType = child._notificationType.encode(
                    "utf-8")
            child._loadPropertyStore(
                props=propertyStores.get(child._resourceID, None))
            results.append(child)

        returnValue(results)
Ejemplo n.º 10
0
    def loadAllObjects(cls, parent):
        """
        Load all child objects and return a list of them. This must create the
        child classes and initialize them using "batched" SQL operations to keep
        this constant wrt the number of children. This is an optimization for
        Depth:1 operations on the collection.
        """

        results = []

        # Pull every notification row for this home in a single query.
        dataRows = (yield cls._allColumnsByHomeIDQuery.on(
            parent._txn,
            homeID=parent._resourceID,
        ))

        if dataRows:
            # Batch-fetch the property stores for every found resource.
            propertyStores = (yield PropertyStore.forMultipleResources(
                parent.uid(),
                None,
                None,
                parent._txn,
                schema.NOTIFICATION.RESOURCE_ID,
                schema.NOTIFICATION.NOTIFICATION_HOME_RESOURCE_ID,
                parent._resourceID,
            ))

        # Build one child object per row, merging in its properties.
        for row in dataRows:
            child = cls(parent, None)
            (
                child._resourceID,
                child._uid,
                child._md5,
                child._size,
                child._notificationType,
                child._created,
                child._modified,
            ) = tuple(row)
            child._created = parseSQLTimestamp(child._created)
            child._modified = parseSQLTimestamp(child._modified)
            # Best-effort JSON decode of the notification type column.
            try:
                child._notificationType = json.loads(child._notificationType)
            except ValueError:
                pass
            # Python 2: normalise a decoded unicode value back to UTF-8 bytes.
            if isinstance(child._notificationType, unicode):
                child._notificationType = child._notificationType.encode("utf-8")
            child._loadPropertyStore(
                props=propertyStores.get(child._resourceID, None)
            )
            results.append(child)

        returnValue(results)
Ejemplo n.º 11
0
    def create(cls, txn, dropboxID, name, ownerHomeID):
        """
        Create a new Attachment object.

        @param txn: The transaction to use
        @type txn: L{CommonStoreTransaction}
        @param dropboxID: the identifier for the attachment (dropbox id or managed id)
        @type dropboxID: C{str}
        @param name: the name of the attachment
        @type name: C{str}
        @param ownerHomeID: the resource-id of the home collection of the attachment owner
        @type ownerHomeID: C{int}

        @raise AttachmentDropboxNotAllowed: if the store has migrated to
            managed attachments and dropbox attachments may no longer be made.
        """

        # If store has already migrated to managed attachments we will prevent
        # creation of dropbox attachments
        dropbox = (yield txn.store().dropboxAllowed(txn))
        if not dropbox:
            raise AttachmentDropboxNotAllowed

        # Now create the DB entry
        att = cls._attachmentSchema
        rows = (yield Insert(
            {
                att.CALENDAR_HOME_RESOURCE_ID: ownerHomeID,
                att.DROPBOX_ID: dropboxID,
                att.CONTENT_TYPE: "",
                att.SIZE: 0,
                att.MD5: "",
                att.PATH: name,
            },
            Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))

        row_iter = iter(rows[0])
        a_id = row_iter.next()
        created = parseSQLTimestamp(row_iter.next())
        modified = parseSQLTimestamp(row_iter.next())

        attachment = cls(txn, a_id, dropboxID, name, ownerHomeID, True)
        attachment._created = created
        attachment._modified = modified

        # File system paths need to exist; tolerate the directory already
        # being present, but only swallow filesystem errors -- a bare
        # "except:" would also hide KeyboardInterrupt and genuine bugs.
        try:
            attachment._path.parent().makedirs()
        except OSError:
            pass

        returnValue(attachment)
Ejemplo n.º 12
0
    def deserialize(cls, attrmap):
        """
        Given an L{dict} mapping attributes to values, create an L{Record} with
        the specified values. Sub-classes may need to override this to handle
        special values that need to be converted to specific types, and to
        handle possible schema mismatches (attributes no longer used, new
        attributes not present in the map).

        @param attrmap: serialized representation of a record
        @type attrmap: L{dict} of L{str}:L{str}

        @return: a newly created, but not inserted, record
        @rtype: L{SerializableRecord}
        """

        def convert(attr, value):
            # Timestamps are serialized as strings and must be parsed back
            # into datetime values; everything else passes through unchanged.
            column = cls.__attrmap__[attr]
            if column.model.type.name == "timestamp" and value is not None:
                return parseSQLTimestamp(value)
            return value

        mapped = dict(
            (attr, convert(attr, value)) for attr, value in attrmap.items()
        )
        return cls.make(**mapped)
Ejemplo n.º 13
0
    def create(cls, txn, dropboxID, name, ownerHomeID):
        """
        Create a new Attachment object.

        @param txn: The transaction to use
        @type txn: L{CommonStoreTransaction}
        @param dropboxID: the identifier for the attachment (dropbox id or managed id)
        @type dropboxID: C{str}
        @param name: the name of the attachment
        @type name: C{str}
        @param ownerHomeID: the resource-id of the home collection of the attachment owner
        @type ownerHomeID: C{int}

        @raise AttachmentDropboxNotAllowed: if the store has migrated to
            managed attachments and dropbox attachments may no longer be made.
        """

        # If store has already migrated to managed attachments we will prevent creation of dropbox attachments
        dropbox = (yield txn.store().dropboxAllowed(txn))
        if not dropbox:
            raise AttachmentDropboxNotAllowed

        # Now create the DB entry
        att = cls._attachmentSchema
        rows = (yield Insert({
            att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
            att.DROPBOX_ID                : dropboxID,
            att.CONTENT_TYPE              : "",
            att.SIZE                      : 0,
            att.MD5                       : "",
            att.PATH                      : name,
        }, Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))

        row_iter = iter(rows[0])
        a_id = row_iter.next()
        created = parseSQLTimestamp(row_iter.next())
        modified = parseSQLTimestamp(row_iter.next())

        attachment = cls(txn, a_id, dropboxID, name, ownerHomeID, True)
        attachment._created = created
        attachment._modified = modified

        # File system paths need to exist; tolerate the directory already
        # being present, but only swallow filesystem errors -- a bare
        # "except:" would also hide KeyboardInterrupt and genuine bugs.
        try:
            attachment._path.parent().makedirs()
        except OSError:
            pass

        returnValue(attachment)
Ejemplo n.º 14
0
    def _create(cls, txn, managedID, ownerHomeID):
        """
        Create a new managed Attachment object.

        @param txn: The transaction to use
        @type txn: L{CommonStoreTransaction}
        @param managedID: the identifier for the attachment
        @type managedID: C{str}
        @param ownerHomeID: the resource-id of the home collection of the attachment owner
        @type ownerHomeID: C{int}
        """

        # Now create the DB entry; a dropbox id of "." marks the row as a
        # managed attachment.
        att = cls._attachmentSchema
        rows = (yield Insert(
            {
                att.CALENDAR_HOME_RESOURCE_ID: ownerHomeID,
                att.DROPBOX_ID: ".",
                att.CONTENT_TYPE: "",
                att.SIZE: 0,
                att.MD5: "",
                att.PATH: "",
            },
            Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))

        row_iter = iter(rows[0])
        a_id = row_iter.next()
        created = parseSQLTimestamp(row_iter.next())
        modified = parseSQLTimestamp(row_iter.next())

        attachment = cls(txn, a_id, ".", None, ownerHomeID, True)
        attachment._managedID = managedID
        attachment._created = created
        attachment._modified = modified

        # File system paths need to exist; tolerate the directory already
        # being present, but only swallow filesystem errors -- a bare
        # "except:" would also hide KeyboardInterrupt and genuine bugs.
        try:
            attachment._path.parent().makedirs()
        except OSError:
            pass

        returnValue(attachment)
Ejemplo n.º 15
0
    def test_parseSQLTimestamp(self):
        """
        L{parseSQLTimestamp} parses the traditional SQL timestamp.
        """
        cases = [
            ("2012-04-04 12:34:56", datetime.datetime(2012, 4, 4, 12, 34, 56)),
            ("2012-12-31 01:01:01", datetime.datetime(2012, 12, 31, 1, 1, 1)),
        ]

        for text, expected in cases:
            self.assertEqual(parseSQLTimestamp(text), expected)
Ejemplo n.º 16
0
    def test_parseSQLTimestamp(self):
        """
        L{parseSQLTimestamp} parses the traditional SQL timestamp.
        """
        self.assertEqual(
            parseSQLTimestamp("2012-04-04 12:34:56"),
            datetime.datetime(2012, 4, 4, 12, 34, 56),
        )
        self.assertEqual(
            parseSQLTimestamp("2012-12-31 01:01:01"),
            datetime.datetime(2012, 12, 31, 1, 1, 1),
        )
Ejemplo n.º 17
0
    def _create(cls, txn, managedID, ownerHomeID):
        """
        Create a new managed Attachment object.

        @param txn: The transaction to use
        @type txn: L{CommonStoreTransaction}
        @param managedID: the identifier for the attachment
        @type managedID: C{str}
        @param ownerHomeID: the resource-id of the home collection of the attachment owner
        @type ownerHomeID: C{int}
        """

        # Now create the DB entry; a dropbox id of "." marks the row as a
        # managed attachment.
        att = cls._attachmentSchema
        rows = (yield Insert({
            att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
            att.DROPBOX_ID                : ".",
            att.CONTENT_TYPE              : "",
            att.SIZE                      : 0,
            att.MD5                       : "",
            att.PATH                      : "",
        }, Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))

        row_iter = iter(rows[0])
        a_id = row_iter.next()
        created = parseSQLTimestamp(row_iter.next())
        modified = parseSQLTimestamp(row_iter.next())

        attachment = cls(txn, a_id, ".", None, ownerHomeID, True)
        attachment._managedID = managedID
        attachment._created = created
        attachment._modified = modified

        # File system paths need to exist; tolerate the directory already
        # being present, but only swallow filesystem errors -- a bare
        # "except:" would also hide KeyboardInterrupt and genuine bugs.
        try:
            attachment._path.parent().makedirs()
        except OSError:
            pass

        returnValue(attachment)
Ejemplo n.º 18
0
    def _attributesFromRow(self, attributeList):
        """
        Take some data loaded from a row and apply it to this instance,
        converting types as necessary.

        @param attributeList: a C{list} of 2-C{tuples} of C{(attributeName,
            attributeValue)}.
        """
        for name, value in attributeList:
            column = self.__attrmap__[name]
            # Timestamp columns arrive as strings and are parsed into
            # datetime values; NULLs are left untouched.
            isTimestamp = column.model.type.name == "timestamp"
            if isTimestamp and value is not None:
                value = parseSQLTimestamp(value)
            setattr(self, name, value)
Ejemplo n.º 19
0
    def _attributesFromRow(self, attributeList):
        """
        Take some data loaded from a row and apply it to this instance,
        converting types as necessary.

        @param attributeList: a C{list} of 2-C{tuples} of C{(attributeName,
            attributeValue)}.
        """
        for setAttribute, setValue in attributeList:
            setColumn = self.__attrmap__[setAttribute]
            # Guard against NULL timestamp values: parsing None would raise,
            # so only convert real values (matches the sibling implementation
            # of this method elsewhere in the codebase).
            if setColumn.model.type.name == "timestamp" and setValue is not None:
                setValue = parseSQLTimestamp(setValue)
            setattr(self, setAttribute, setValue)
    def test_eventsOlderThan(self):
        """
        C{eventsOlderThan} returns all events older than the cutoff, and a
        C{batchSize} of 1 limits the result to a single entry.
        """
        cutoff = DateTime(now, 4, 1, 0, 0, 0)
        txn = self._sqlCalendarStore.newTransaction()
        # NOTE(review): the transaction is never committed or aborted here --
        # presumably cleanup happens in the test harness teardown; confirm.

        # Query for all old events
        results = (yield txn.eventsOlderThan(cutoff))
        self.assertEquals(
            sorted(results),
            sorted([
                ['home1', 'calendar1', 'old.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldattachment2.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldmattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldmattachment2.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar3', 'repeating_awhile.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'recent.ics', parseSQLTimestamp('%s-03-04 22:15:00' % (now,))],
                ['home2', 'calendar2', 'oldattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldattachment3.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldattachment4.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment3.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment4.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
            ])
        )

        # Query for oldest event - actually with limited time caching, the oldest event
        # cannot be precisely known, all we get back is the first one in the sorted list
        # where each has the 1901 "dummy" time stamp to indicate a partial cache
        results = (yield txn.eventsOlderThan(cutoff, batchSize=1))
        self.assertEquals(len(results), 1)
    def test_eventsOlderThan(self):
        """
        All events older than the cutoff are reported by
        C{eventsOlderThan}; C{batchSize=1} caps the result at one entry.
        """
        cutoff = DateTime(now, 4, 1, 0, 0, 0)
        txn = self._sqlCalendarStore.newTransaction()
        # NOTE(review): txn is not committed/aborted within this test --
        # presumably the harness cleans it up; verify.

        # Query for all old events
        results = (yield txn.eventsOlderThan(cutoff))
        self.assertEquals(
            sorted(results),
            sorted([
                ['home1', 'calendar1', 'old.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldattachment2.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldmattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home1', 'calendar1', 'oldmattachment2.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar3', 'repeating_awhile.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'recent.ics', parseSQLTimestamp('%s-03-04 22:15:00' % (now,))],
                ['home2', 'calendar2', 'oldattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldattachment3.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldattachment4.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment1.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment3.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
                ['home2', 'calendar2', 'oldmattachment4.ics', parseSQLTimestamp('1901-01-01 01:00:00')],
            ])
        )

        # Query for oldest event - actually with limited time caching, the oldest event
        # cannot be precisely known, all we get back is the first one in the sorted list
        # where each has the 1901 "dummy" time stamp to indicate a partial cache
        results = (yield txn.eventsOlderThan(cutoff, batchSize=1))
        self.assertEquals(len(results), 1)