Example #1
    def testItemTypeCreation(self):
        """
        Test that processors for different Item types can be
        created, that they are valid Item types themselves, and that
        repeated calls return the same object when appropriate.
        """
        procB = batch.processor(TestWorkUnit)
        self.assertIdentical(self.procType, procB)

        procC = batch.processor(ExtraUnit)
        self.failIfIdentical(procB, procC)
        self.failIfEqual(procB.typeName, procC.typeName)
Example #2
    def testItemTypeCreation(self):
        """
        Test that processors for different Item types can be
        created, that they are valid Item types themselves, and that
        repeated calls return the same object when appropriate.
        """
        procB = batch.processor(TestWorkUnit)
        self.assertIdentical(self.procType, procB)

        procC = batch.processor(ExtraUnit)
        self.failIfIdentical(procB, procC)
        self.assertNotEqual(procB.typeName, procC.typeName)
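
Both variants above exercise the same contract: batch.processor maps each Item class to a single processor type, so repeated calls with the same class return the identical type object, while different classes yield distinct processor types with distinct typeNames. A minimal sketch of that contract outside a test case; the WorkUnit and OtherUnit classes here are hypothetical stand-ins, not part of the test module:

from axiom import attributes, batch, item

class WorkUnit(item.Item):
    # Hypothetical work item; any Item subclass will do.
    value = attributes.integer()

class OtherUnit(item.Item):
    # A second, unrelated Item subclass.
    value = attributes.integer()

# Repeated calls with the same class are expected to return the same type ...
WorkUnitSource = batch.processor(WorkUnit)
assert batch.processor(WorkUnit) is WorkUnitSource

# ... while a different class gets a distinct processor type and typeName.
OtherUnitSource = batch.processor(OtherUnit)
assert OtherUnitSource is not WorkUnitSource
assert OtherUnitSource.typeName != WorkUnitSource.typeName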
Example #3
    def test_itemAddedStartsBatchProcess(self):
        """
        If there are remote-style listeners for an item source, C{itemAdded}
        starts the batch process.

        This is not completely correct.  There may be items to process remotely
        when the main process starts up, before any new items are added.  This
        is simpler to implement, but it shouldn't be taken as a reason not to
        implement the actually correct solution.
        """
        st = store.Store(self.mktemp())
        svc = service.IService(st)
        svc.startService()
        self.addCleanup(svc.stopService)

        batchService = iaxiom.IBatchService(st)

        procType = batch.processor(TestWorkUnit)
        proc = procType(store=st)
        listener = WorkListener(store=st)
        proc.addReliableListener(listener, style=iaxiom.REMOTE)

        # Sanity check: addReliableListener should eventually also trigger a
        # batch process start if necessary.  But we don't want to test that case
        # here, so make sure it's not happening.
        self.assertEqual(batchService.batchController.mode, 'stopped')

        # Now trigger it to start.
        proc.itemAdded()

        # It probably won't be ready by now, but who knows.
        self.assertIn(batchService.batchController.mode, ('starting', 'ready'))
Example #4
    def test_itemAddedStartsBatchProcess(self):
        """
        If there are remote-style listeners for an item source, C{itemAdded}
        starts the batch process.

        This is not completely correct.  There may be items to process remotely
        when the main process starts up, before any new items are added.  This
        is simpler to implement, but it shouldn't be taken as a reason not to
        implement the actually correct solution.
        """
        st = store.Store(self.mktemp())
        svc = service.IService(st)
        svc.startService()
        self.addCleanup(svc.stopService)

        batchService = iaxiom.IBatchService(st)

        procType = batch.processor(TestWorkUnit)
        proc = procType(store=st)
        listener = WorkListener(store=st)
        proc.addReliableListener(listener, style=iaxiom.REMOTE)

        # Sanity check: addReliableListener should eventually also trigger a
        # batch process start if necessary.  But we don't want to test that case
        # here, so make sure it's not happening.
        self.assertEqual(batchService.batchController.mode, "stopped")

        # Now trigger it to start.
        proc.itemAdded()

        # It probably won't be ready by now, but who knows.
        self.assertIn(batchService.batchController.mode, ("starting", "ready"))
Example #5
    def test_itemAddedWithoutBatchService(self):
        """
        If the store has no batch service, C{itemAdded} doesn't start the batch
        process and also doesn't raise an exception.
        """
        # An in-memory store can't have a batch service.
        st = store.Store()
        svc = service.IService(st)
        svc.startService()
        self.addCleanup(svc.stopService)

        procType = batch.processor(TestWorkUnit)
        proc = procType(store=st)
        listener = WorkListener(store=st)
        proc.addReliableListener(listener, style=iaxiom.REMOTE)

        proc.itemAdded()

        # And still there should be no batch service at all.
        self.assertIdentical(iaxiom.IBatchService(st, None), None)
Example #7
    def test_itemAddedBeforeStarted(self):
        """
        If C{itemAdded} is called before the batch service is started, the batch
        process is not started.
        """
        st = store.Store(self.mktemp())

        procType = batch.processor(TestWorkUnit)
        proc = procType(store=st)
        listener = WorkListener(store=st)
        proc.addReliableListener(listener, style=iaxiom.REMOTE)

        proc.itemAdded()

        # When the service later starts, the batch service needn't start its
        # process.  Not that this would be bad.  Feel free to reverse this
        # behavior if you really want.
        svc = service.IService(st)
        svc.startService()
        self.addCleanup(svc.stopService)

        batchService = iaxiom.IBatchService(st)
        self.assertEqual(batchService.batchController.mode, 'stopped')
Example #8
    def test_itemAddedBeforeStarted(self):
        """
        If C{itemAdded} is called before the batch service is started, the batch
        process is not started.
        """
        st = store.Store(self.mktemp())

        procType = batch.processor(TestWorkUnit)
        proc = procType(store=st)
        listener = WorkListener(store=st)
        proc.addReliableListener(listener, style=iaxiom.REMOTE)

        proc.itemAdded()

        # When the service later starts, the batch service needn't start its
        # process.  Not that this would be bad.  Feel free to reverse this
        # behavior if you really want.
        svc = service.IService(st)
        svc.startService()
        self.addCleanup(svc.stopService)

        batchService = iaxiom.IBatchService(st)
        self.assertEqual(batchService.batchController.mode, "stopped")
Example #9
    """
    Exception always raised by L{BrokenReliableListener.processItem}.
    """



class BatchWorkItem(item.Item):
    """
    Item class which will be delivered as work units for testing error handling
    around reliable listeners.
    """
    value = attributes.text(default=u"unprocessed")



BatchWorkSource = batch.processor(BatchWorkItem)



class BrokenReliableListener(item.Item):
    """
    A listener for batch work which always raises an exception from its
    processItem method.  Used to test that errors from processItem are properly
    handled.
    """

    anAttribute = attributes.integer()

    def processItem(self, item):
        raise BrokenException("Broken Reliable Listener is working as expected.")
Example #10
from twisted.mail import smtp, imap4
from twisted.mail.smtp import IMessageDeliveryFactory
from twisted.application.service import IService

from axiom import item, attributes, userbase, batch
from axiom.attributes import reference, integer, bytes
from axiom.upgrade import registerUpgrader
from axiom.errors import MissingDomainPart
from axiom.dependency import dependsOn, installOn

from xmantissa.ixmantissa import IProtocolFactoryFactory
from xmantissa.port import TCPPort, SSLPort

from xquotient import iquotient, exmess, mimestorage

MessageSource = batch.processor(exmess.Message)


class MailConfigurationError(RuntimeError):
    """You specified some invalid configuration.
    """


class MessageDelivery(object):
    """
    Message Delivery implementation used by anonymous senders.

    This implementation only allows messages to be delivered to local users
    (i.e., it does not perform relaying) and rejects sender addresses which
    belong to local users.
    """
Example #11
from axiom.batch import processor

from xmantissa.fulltext import HypeIndexer, XapianIndexer, PyLuceneIndexer


class StubItem(Item):
    """
    Place-holder.  Stands in as an indexable thing, but no instances of this
    will ever actually be created.
    """
    __module__ = 'xmantissa.test.historic.stub_remoteIndexer1to2'

    attribute = integer()


StubSource = processor(StubItem)


def createDatabase(s):
    """
    Create a batch processor for L{StubItem} instances and add it as a message
    source to an instance of each of the kinds of indexers we support.
    """
    source = StubSource(store=s)
    for cls in [HypeIndexer, XapianIndexer, PyLuceneIndexer]:
        indexer = cls(store=s)
        source.addReliableListener(indexer)



if __name__ == '__main__':
Example #12
class BrokenException(Exception):
    """
    Exception always raised by L{BrokenReliableListener.processItem}.
    """


class BatchWorkItem(item.Item):
    """
    Item class which will be delivered as work units for testing error handling
    around reliable listeners.
    """

    value = attributes.text(default="unprocessed")


BatchWorkSource = batch.processor(BatchWorkItem)


class BrokenReliableListener(item.Item):
    """
    A listener for batch work which always raises an exception from its
    processItem method.  Used to test that errors from processItem are properly
    handled.
    """

    anAttribute = attributes.integer()

    def processItem(self, item):
        raise BrokenException("Broken Reliable Listener is working as expected.")
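
The listener half of the contract is equally small: a reliable listener is itself an Item whose processItem method is handed each delivered work unit. Where BrokenReliableListener raises on purpose, a sketch of a listener that actually consumes BatchWorkItems could look like the following (RecordingListener is hypothetical, not part of the test module):

from axiom import attributes, item

class RecordingListener(item.Item):
    # Hypothetical listener: marks each work unit and counts how many it has seen.
    processedCount = attributes.integer(default=0)

    def processItem(self, workItem):
        workItem.value = u"processed"
        self.processedCount += 1

It would be attached the same way the indexer stub above attaches its indexers, e.g. BatchWorkSource(store=s).addReliableListener(RecordingListener(store=s)).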

Example #13
    def setUp(self):
        self.procType = batch.processor(TestWorkUnit)
        self.store = store.Store()
        self.scheduler = iaxiom.IScheduler(self.store)
Example #14
        for blurb in self.store.query(Blurb, Blurb.parent == self):
            blurb.delete()
        self.deleteFromStore()

    def stored(self):
        """
        Hook the occurrence of a blurb being added to a store and notify the
        batch processor, if one exists, of the event so that it can schedule
        itself to handle the new blurb, if necessary.
        """
        source = self.store.findUnique(BlurbSource, default=None)
        if source is not None:
            source.itemAdded()


BlurbSource = batch.processor(Blurb)


class PastBlurb(Item):
    """
    This is an old version of a blurb.  It contains the text as it used to be
    at a particular point in time.
    """

    typeName = 'hyperbola_past_blurb'
    schemaVersion = 1

    dateEdited = timestamp()

    title = text()
    body = text()
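
The stored hook above is the general recipe for feeding a batch source: whenever an instance of the watched class is added to a store, look up that class's processor (if one has been created) and call itemAdded on it so the new item can be scheduled for processing. The same hook on a hypothetical item class, as a minimal sketch:

from axiom import attributes, batch
from axiom.item import Item

class Note(Item):
    # Hypothetical item that wants batch processing when it is created.
    body = attributes.text()

    def stored(self):
        # Notify the batch source for this class, if the store has one.
        source = self.store.findUnique(NoteSource, default=None)
        if source is not None:
            source.itemAdded()

NoteSource = batch.processor(Note)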
Example #15
    def setUp(self):
        self.procType = batch.processor(TestWorkUnit)
        self.store = store.Store()
        self.scheduler = Scheduler(store=self.store)
        self.scheduler.installOn(self.store)
Example #16
from twisted.mail import smtp, imap4
from twisted.mail.smtp import IMessageDeliveryFactory
from twisted.application.service import IService

from axiom import item, attributes, userbase, batch
from axiom.attributes import reference, integer, bytes
from axiom.upgrade import registerUpgrader
from axiom.errors import MissingDomainPart
from axiom.dependency import dependsOn, installOn

from xmantissa.ixmantissa import IProtocolFactoryFactory
from xmantissa.port import TCPPort, SSLPort

from xquotient import iquotient, exmess, mimestorage

MessageSource = batch.processor(exmess.Message)

class MailConfigurationError(RuntimeError):
    """You specified some invalid configuration.
    """

class MessageDelivery(object):
    """
    Message Delivery implementation used by anonymous senders.

    This implementation only allows messages to be delivered to local users
    (i.e., it does not perform relaying) and rejects sender addresses which
    belong to local users.
    """
    implements(smtp.IMessageDelivery)
Example #17
# -*- test-case-name: axiom.test.historic.test_processor1to2 -*-

from axiom.item import Item
from axiom.attributes import text
from axiom.batch import processor

from axiom.test.historic.stubloader import saveStub


class Dummy(Item):
    __module__ = 'axiom.test.historic.stub_processor1to2'
    typeName = 'axiom_test_historic_stub_processor1to2_dummy'

    attribute = text()


DummyProcessor = processor(Dummy)


def createDatabase(s):
    """
    Put a processor of some sort into a Store.
    """
    t = DummyProcessor(store=s)
    print(t.typeName)


if __name__ == '__main__':
    saveStub(createDatabase, 7973)
Example #18
    def testRecoveryAfterFailure(self):
        """
        Create an indexer, attach some sources to it, let it process some
        messages, corrupt the database, let it try to clean things up, then
        make sure the index is in a reasonable state.
        """
        # Try to access the indexer directly first so that if it is
        # unavailable, the test will be skipped.
        self.openReadIndex().close()

        service = batch.BatchProcessingService(self.store, iaxiom.REMOTE)
        task = service.step()

        source = batch.processor(IndexableThing)(store=self.store)
        self.indexer.addSource(source)

        things = [
            IndexableThing(store=self.store,
                           _documentType=u'thing',
                           _uniqueIdentifier='100',
                           _textParts=[u'apple', u'banana'],
                           _keywordParts={}),
            IndexableThing(store=self.store,
                           _documentType=u'thing',
                           _uniqueIdentifier='200',
                           _textParts=[u'cherry'],
                           _keywordParts={})]

        for i in xrange(len(things)):
            task.next()

        self.indexer.suspend()

        # Sanity check - make sure both items come back from a search before
        # going on with the real core of the test.
        reader = self.openReadIndex()
        self.assertEquals(identifiersFrom(reader.search(u'apple')), [100])
        self.assertEquals(identifiersFrom(reader.search(u'cherry')), [200])
        self.assertEquals(identifiersFrom(reader.search(u'drosophila')), [])
        reader.close()

        self.corruptIndex()
        self.indexer.resume()

        things.append(
            IndexableThing(store=self.store,
                           _documentType=u'thing',
                           _uniqueIdentifier='300',
                           _textParts=[u'drosophila', u'melanogaster'],
                           _keywordParts={}))

        # Step it once so that it notices the index has been corrupted.
        task.next()
        self.indexer.suspend()

        # At this point, the index should have been deleted, so any search
        # should turn up no results.
        reader = self.openReadIndex()
        self.assertEquals(identifiersFrom(reader.search(u'apple')), [])
        self.assertEquals(identifiersFrom(reader.search(u'cherry')), [])
        self.assertEquals(identifiersFrom(reader.search(u'drosophila')), [])
        reader.close()

        self.indexer.resume()

        # Step it another N so that each thing gets re-indexed.
        for i in xrange(len(things)):
            task.next()

        self.indexer.suspend()

        reader = self.openReadIndex()
        self.assertEquals(identifiersFrom(reader.search(u'apple')), [100])
        self.assertEquals(identifiersFrom(reader.search(u'cherry')), [200])
        self.assertEquals(identifiersFrom(reader.search(u'drosophila')), [300])
        reader.close()
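
The recovery test drives the batch machinery by hand rather than through the batch service: batch.BatchProcessingService(store, style) wraps a store, and its step() method returns a generator that performs one unit of listener work per iteration, which is why the test advances it once per IndexableThing. A condensed sketch of that manual-stepping pattern, with hypothetical item and listener classes and illustrative names only:

from axiom import attributes, batch, iaxiom, item, store

class WorkUnit(item.Item):
    # Hypothetical unit of work.
    value = attributes.text(default=u"unprocessed")

class CountingListener(item.Item):
    # Hypothetical listener that marks units as handled.
    processedCount = attributes.integer(default=0)

    def processItem(self, workItem):
        workItem.value = u"processed"
        self.processedCount += 1

WorkUnitSource = batch.processor(WorkUnit)

s = store.Store()
source = WorkUnitSource(store=s)
source.addReliableListener(CountingListener(store=s), style=iaxiom.REMOTE)

units = [WorkUnit(store=s) for _ in range(3)]

# As in the test above: advance the generator once per unit of pending work.
service = batch.BatchProcessingService(s, iaxiom.REMOTE)
task = service.step()
for _ in units:
    next(task)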
Example #19
        for blurb in self.store.query(Blurb, Blurb.parent == self):
            blurb.delete()
        self.deleteFromStore()

    def stored(self):
        """
        Hook the occurrence of a blurb being added to a store and notify the
        batch processor, if one exists, of the event so that it can schedule
        itself to handle the new blurb, if necessary.
        """
        source = self.store.findUnique(BlurbSource, default=None)
        if source is not None:
            source.itemAdded()


BlurbSource = batch.processor(Blurb)


class PastBlurb(Item):
    """
    This is an old version of a blurb.  It contains the text as it used to be
    at a particular point in time.
    """

    typeName = "hyperbola_past_blurb"
    schemaVersion = 1

    dateEdited = timestamp()

    title = text()
    body = text()
Example #21
            md.data = data

    # IFulltextIndexable

    def uniqueIdentifier(self):
        return str(self.storeID)

    def textParts(self):
        yield self.url

        if self.title is not None:
            yield self.title



LinkEntrySource = batch.processor(LinkEntry)



class LinkEntryComment(Item):
    implements(IFulltextIndexable)

    typeName = 'eridanus_plugins_linkdb_linkentrycomment'
    schemaVersion = 1

    created = timestamp(doc="""
    Timestamp of when this comment was created.
    """, defaultFactory=lambda: Time())

    parent = reference(doc="""
    L{LinkEntry} item this comment refers to.