Example #1
    def test_disconnect_with_user_locked_after_auth(self):
        """Client gets disconnected if the user is locked after auth."""
        # add the log handler
        logger = logging.getLogger('storage.server')
        hdlr = MementoHandler()
        hdlr.setLevel(logging.INFO)
        logger.addHandler(hdlr)
        # define a connectionLostHandler to know when the client
        # gets disconnected.
        d = defer.Deferred()

        def conn_lost_handler(r):
            """Connection lost!"""
            d.callback(None)

        @defer.inlineCallbacks
        def dummy(client):
            # set the connection lost handler
            client.connectionLostHandler = conn_lost_handler
            # trigger operations that should succeed (user not locked yet)
            yield client.dummy_authenticate("open sesame")
            root_id = yield client.get_root()
            # lock the user:
            usr = self.store.get(StorageUser, 0)
            usr.locked = True
            self.store.commit()
            client.make_dir(request.ROOT, root_id, u"open sesame")
            yield d
            # check we logged a warning about this.
            self.assertTrue(hdlr.check_warning(
                "Shutting down protocol: user locked"))
        return self.callback_test(dummy, add_default_callbacks=True)
Example #2
class BaseEQTestCase(BaseTwistedTestCase):
    """ Setup an EQ for test. """

    _monitor_class = FakeMonitor

    @defer.inlineCallbacks
    def setUp(self):
        """Setup the test."""
        yield super(BaseEQTestCase, self).setUp()
        self.fsmdir = self.mktemp('fsmdir')
        self.partials_dir = self.mktemp('partials_dir')
        self.root_dir = self.mktemp('root_dir')
        self.vm = FakeVolumeManager(self.root_dir)
        self.db = tritcask.Tritcask(self.mktemp('tritcask'))
        self.addCleanup(self.db.shutdown)
        self.fs = filesystem_manager.FileSystemManager(self.fsmdir,
                                                       self.partials_dir,
                                                       self.vm, self.db)
        self.fs.create(path=self.root_dir,
                       share_id='', is_dir=True)
        self.fs.set_by_path(path=self.root_dir,
                            local_hash=None, server_hash=None)
        self.eq = event_queue.EventQueue(self.fs,
                                         monitor_class=self._monitor_class)
        self.eq.listener_map = {}
        self.addCleanup(self.eq.shutdown)
        self.fs.register_eq(self.eq)

        # add a Memento handler to the logger
        self.log_handler = MementoHandler()
        self.log_handler.setLevel(logging.DEBUG)
        self.eq.log.addHandler(self.log_handler)
Example #3
        @defer.inlineCallbacks
        def auth(client):
            yield client.dummy_authenticate('open sesame')
            d = defer.Deferred()
            client.connectionLostHandler = d.callback
            # add the log handler
            logger = logging.getLogger('storage.server')
            hdlr = MementoHandler()
            hdlr.setLevel(logging.INFO)
            logger.addHandler(hdlr)
            # patch the looping ping values
            server = self.service.factory.protocols[0]
            server.ping_loop.interval = 0.1
            server.ping_loop.idle_timeout = 0.3
            # reschedule the ping loop
            server.ping_loop.reset()
            try:
                yield d
            except ConnectionDone:
                msg = "Disconnecting - idle timeout"
                self.assertTrue(hdlr.check_info(msg))
            else:
                self.fail("Should get disconnected.")
            finally:
                logger.removeHandler(hdlr)
Example #4
class FilterTests(unittest.TestCase):
    """Tests log filters"""

    @defer.inlineCallbacks
    def setUp(self):
        """Setup the logger and the handler"""
        yield super(FilterTests, self).setUp()
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        root_logger.addHandler(self.handler)
        self.addCleanup(root_logger.removeHandler, self.handler)

        if filesystem_logger is not None:
            filesystem_logger.addHandler(self.handler)
            self.addCleanup(filesystem_logger.removeHandler, self.handler)

        twisted_logger.addHandler(self.handler)
        self.addCleanup(twisted_logger.removeHandler, self.handler)

        self.addCleanup(self.handler.close)

    @skipIfOS('win32', 'There is no filesystem_logger implementation on '
                       'Windows yet, see bug #823316.')
    def test_multiple_filters(self):
        """Tests logging with more than one filter."""
        test_logger = logging.getLogger('ubuntuone.SyncDaemon.FilterTest')
        test_logger.debug('debug info 0')
        self.assertEquals(1, len(self.handler.records))
        self.handler.addFilter(
            MultiFilter(['ubuntuone.SyncDaemon', 'twisted', 'pyinotify']))
        test_logger.debug('debug info 1')
        self.assertEquals(2, len(self.handler.records))
Example #5
class SSLTestCase(BaseSSLTestCase):
    """Test error handling when dealing with ssl."""

    @defer.inlineCallbacks
    def setUp(self):
        """Set the diff tests."""
        yield super(SSLTestCase, self).setUp()

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        logger = webclient.webclient_module().logger
        logger.addHandler(self.memento)
        self.addCleanup(logger.removeHandler, self.memento)

        self.wc = webclient.webclient_factory()
        self.addCleanup(self.wc.shutdown)

        self.called = []

    def test_ssl_fail(self):
        """Test showing the dialog and rejecting."""
        self.failUnlessFailure(self.wc.request(
                self.base_iri + SIMPLERESOURCE), WebClientError)
        self.assertNotEqual(None, self.memento.check_error('SSL errors'))

    if (WEBCLIENT_MODULE_NAME.endswith(".txweb") or
            WEBCLIENT_MODULE_NAME.endswith(".libsoup")):
        reason = 'SSL support has not yet been implemented.'
        test_ssl_fail.skip = reason
Example #6
 def test_message(self):
     """Just a message."""
     handler = MementoHandler()
     handler.setLevel(logging.DEBUG)
     deferror_handler(dict(isError=True, message="foobar"))
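     # the handler was never attached to any logger, so nothing is captured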
     self.assertFalse(handler.check_error("Unhandled error in deferred",
                                          "foobar"))
Example #7
    def test_disconnect_with_user_locked_after_auth(self):
        """Client gets disconnected if the user is locked after auth."""
        # add the log handler
        logger = logging.getLogger('storage.server')
        hdlr = MementoHandler()
        hdlr.setLevel(logging.INFO)
        logger.addHandler(hdlr)
        # define a connectionLostHandler to know when the client
        # gets disconnected.
        d = defer.Deferred()

        def conn_lost_handler(r):
            """Connection lost!"""
            d.callback(None)

        @defer.inlineCallbacks
        def dummy(client):
            # set the connection lost handler
            client.connectionLostHandler = conn_lost_handler
            # trigger operations that should succeed (user not locked yet)
            yield client.dummy_authenticate("open sesame")
            root_id = yield client.get_root()
            # lock the user:
            usr = self.user_store.get(model.StorageUser, 0)
            usr.locked = True
            self.user_store.commit()
            client.make_dir(request.ROOT, root_id, u"open sesame")
            yield d
            # check we logged a warning about this.
            self.assertTrue(
                hdlr.check_warning("Shutting down protocol: user locked"))

        return self.callback_test(dummy, add_default_callbacks=True)
Example #8
        @defer.inlineCallbacks
        def auth(client):
            yield client.dummy_authenticate('open sesame')
            d = defer.Deferred()
            client.connectionLostHandler = d.callback
            # add the log handler
            logger = logging.getLogger('storage.server')
            hdlr = MementoHandler()
            hdlr.setLevel(logging.INFO)
            logger.addHandler(hdlr)
            # patch the looping ping values
            server = self.service.factory.protocols[0]
            server.ping_loop.interval = 0.1
            server.ping_loop.idle_timeout = 0.3
            # reschedule the ping loop
            server.ping_loop.reset()
            try:
                yield d
            except ConnectionDone:
                msg = "Disconnecting - idle timeout"
                self.assertTrue(hdlr.check_info(msg))
            else:
                self.fail("Should get disconnected.")
            finally:
                logger.removeHandler(hdlr)
Example #9
 def test_failure(self):
     """Received a full failure."""
     handler = MementoHandler()
     handler.setLevel(logging.DEBUG)
     f = failure.Failure(ValueError('foobar'))
     deferror_handler(dict(isError=True, failure=f, message=''))
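     # again, the handler is not attached to any logger, so no record is expected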
     self.assertFalse(handler.check_error("Unhandled error in deferred",
                                          "ValueError", "foobar"))
Example #10
 @defer.inlineCallbacks
 def setUp(self):
     yield super(ClientDummyAuthTests, self).setUp()
     self.creds = 'open sesame'
     self.bad_creds = 'not my secret'
     self.handler = MementoHandler()
     logger = logging.getLogger('storage.server')
     logger.addHandler(self.handler)
     self.addCleanup(logger.removeHandler, self.handler)
     self.handler.setLevel(logging.DEBUG)
Example #11
class OSWrapperTests(test_os_helper.OSWrapperTests):
    """Tests for os wrapper functions."""

    @defer.inlineCallbacks
    def setUp(self, test_dir_name=None, test_file_name=None,
              valid_file_path_builder=None):
        """Set up."""
        yield super(OSWrapperTests, self).setUp(
            test_dir_name=test_dir_name, test_file_name=test_file_name,
            valid_file_path_builder=None)
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        self._logger = logging.getLogger('ubuntuone.SyncDaemon')
        self._logger.addHandler(self.handler)
        self.addCleanup(self._logger.removeHandler, self.handler)
        self.patch(darwin.shutil, "move", self._fake_move)

    def _fake_move(*args):
        """Fake shutil move."""
        raise Exception("Fail fake move")

    def test_stat_symlink(self):
        """Test that it doesn't follow symlinks.

        We compare the inode only (enough to see if it's returning info
        from the link or the linked), as we can not compare the full stat
        because the st_mode will be different.
        """
        link = os.path.join(self.basedir, 'foo')
        os.symlink(self.testfile, link)
        self.assertNotEqual(os.stat(link).st_ino, stat_path(link).st_ino)
        self.assertEqual(os.lstat(link).st_ino, stat_path(link).st_ino)

    def test_movetotrash_file_bad(self):
        """Something bad happen when moving to trash, removed anyway."""
        path = os.path.join(self.basedir, 'foo')
        open_file(path, 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo"))

    def test_movetotrash_file_not_exists(self):
        """Something bad happen when moving to trash, removed anyway."""
        path = os.path.join(self.basedir, 'foo2')
        self.assertFalse(os.path.exists(path))
        self.assertRaises(OSError, move_to_trash, path)

    def test_movetotrash_dir_bad(self):
        """Something bad happen when moving to trash, removed anyway."""
        path = os.path.join(self.basedir, 'foo')
        os.mkdir(path)
        open_file(os.path.join(path, 'file inside directory'), 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo"))
Example #12
 @defer.inlineCallbacks
 def setUp(self):
     yield super(HeartbeatWriterTest, self).setUp()
     self.logger = logging.Logger("HeartbeatWriter.test")
     self.handler = MementoHandler()
     self.logger.addHandler(self.handler)
     self.addCleanup(self.logger.removeHandler, self.handler)
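     # use a deterministic clock so the test controls heartbeat timing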
     self.clock = task.Clock()
     self.hw = HeartbeatWriter(self.interval,
                               self.logger,
                               reactor=self.clock)
Example #13
class RemovableSignalTestCase(TestCase):
    """Tests for RemovableSignal."""

    @defer.inlineCallbacks
    def setUp(self):
        yield super(RemovableSignalTestCase, self).setUp()
        self.proxy = FakeSSOProxy()

    def test_creation(self):
        """When creating, bind properly to self.proxy."""
        rs = RemovableSignal(self.proxy, "test", lambda *a: None)
        self.assertIs(self.proxy.test, rs)

    def test_dunder_callable(self):
        """__call__ works as expected."""
        sample_store = []
        expected = object()
        test_cb = lambda res: sample_store.append(res)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs(APP_NAME, expected)
        self.assertEqual(sample_store, [expected])

    def test_callable_does_not_log_args(self):
        """__call__ does not log its arguments."""
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)

        secret_token = "secret token!"
        test_cb = lambda _: None
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)

        rs(APP_NAME, {"secret": secret_token})
        for record in self.handler.records:
            self.assertNotIn(secret_token, record.message)

    def test_dunder_filters_other_apps(self):
        """__call__ filters by app_name."""
        sample_store = []
        test_cb = lambda res: sample_store.append(res)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs('other app name', object())
        self.assertEqual(sample_store, [])

    def test_remove(self):
        """The signal has a .remove that removes the callback."""
        sample_store = []
        test_cb = lambda app_name, creds: sample_store.append(creds)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs.remove()
        rs(TEST_APP_NAME, TEST_CREDENTIALS)
        self.assertEqual(len(sample_store), 0)
Example #14
 def setUp(self):
     super(RestHelperTestCase, self).setUp()
     self.handler = MementoHandler()
     self.user = self.obj_factory.make_user(1, u"bob", u"bobby boo",
                                            2 * (2**30))
     self.mapper = ResourceMapper()
     logger = logging.getLogger("test")
     logger.addHandler(self.handler)
     logger.setLevel(logging.INFO)
     logger.propagate = False
     self.helper = RestHelper(self.mapper, logger=logger)
     self.store = self.get_shard_store(self.user.shard_id)
Example #15
class GetProjectDirTestCase(TestCase):
    """Test case for get_project_dir when constants module is not defined."""

    DIR_NAME = utils.DATA_SUFFIX
    DIR_CONSTANT = 'PROJECT_DIR'
    DIR_GETTER = 'get_project_dir'

    @defer.inlineCallbacks
    def setUp(self):
        yield super(GetProjectDirTestCase, self).setUp()
        self._constants = sys.modules.get(CONSTANTS_MODULE, NOT_DEFINED)
        sys.modules[CONSTANTS_MODULE] = None  # force ImportError

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        utils.logger.addHandler(self.memento)
        self.addCleanup(utils.logger.removeHandler, self.memento)

        self.get_dir = getattr(utils, self.DIR_GETTER)

    @defer.inlineCallbacks
    def tearDown(self):
        if self._constants is not NOT_DEFINED:
            sys.modules[CONSTANTS_MODULE] = self._constants
        else:
            sys.modules.pop(CONSTANTS_MODULE)
        yield super(GetProjectDirTestCase, self).tearDown()

    def test_get_dir_relative(self):
        """The relative path for the data directory is correctly retrieved."""
        module = utils.os.path.dirname(utils.__file__)
        rel_data = utils.os.path.join(module,
                                      utils.os.path.pardir,
                                      utils.os.path.pardir,
                                      self.DIR_NAME)
        expected_dir = utils.os.path.abspath(rel_data)

        # ensure expected_path exists at os level
        self.patch(utils.os.path, 'exists', lambda path: path == expected_dir)

        result = self.get_dir()
        self.assertEqual(expected_dir, result)

    def test_get_dir_none_exists(self):
        """No data directory exists, return None and log as error."""
        self.patch(utils.os.path, 'exists', lambda path: False)
        sys.modules[CONSTANTS_MODULE] = None

        self.assertRaises(AssertionError, self.get_dir)
        msg = 'get_dir: can not build a valid path.'
        self.assertTrue(self.memento.check_error(msg))
Example #16
    def setUp(self):
        super(StatsWorkerTestCase, self).setUp()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.worker = stats_worker.StatsWorker(10, '', self.rpc)

        # logging setup
        self.handler = MementoHandler()
        self.worker.logger.addHandler(self.handler)
        self.addCleanup(self.worker.logger.removeHandler, self.handler)
        self.worker.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.worker.logger.propagate = False
        self.handler.debug = True
Example #17
    @defer.inlineCallbacks
    def test_logger_can_be_given(self):
        """Accept an external logger."""
        logger = logging.getLogger("ubuntuone.SyncDaemon.Test")
        handler = MementoHandler()
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
        logger.propagate = False

        # acquire and test
        release = yield self.plt.acquire('path', logger=logger)
        self.assertTrue(handler.check_debug("acquiring on"))

        # release and test
        release()
        self.assertTrue(handler.check_debug("releasing"))
Example #18
    def test_retryable_transaction_InternalError(self):
        """Retryable_transaction retry and log on InternalError."""
        sleep = self.mocker.replace('time.sleep')
        self.expect(sleep(mocker.ANY)).count(2)
        self.mocker.replay()

        logger = logging.getLogger('storage.server.txn')
        h = MementoHandler()
        logger.addHandler(h)

        calls = []

        @retryable_transaction()
        def function():
            """Fail with InternalError."""
            if len(calls) < 1:
                calls.append('function')
                raise InternalError('internal error')
            else:
                calls.append('function')

        function()
        logger.removeHandler(h)
        self.assertEqual(calls, ['function'] * 2)
        self.assertEqual(1, len(h.records))
        self.assertIn('Got an InternalError, retrying',
                      h.records[0].getMessage())
        self.assertEqual('internal error', h.records[0].exc_info[1].message)
Example #19
 @defer.inlineCallbacks
 def setUp(self):
     """Setup the logger and the handler"""
     yield super(DebugCaptureTest, self).setUp()
     self.handler = MementoHandler()
     self.logger = logging.getLogger(self.__class__.__name__)
     self.logger.addHandler(self.handler)
     self.logger.setLevel(logging.DEBUG)
Example #20
 @defer.inlineCallbacks
 def setUp(self):
     yield super(TestServerScan, self).setUp()
     yield self.get_client()
     yield self.do_create_lots_of_files('_pre')
     self.handler = handler = MementoHandler()
     handler.setLevel(logging.DEBUG)
     logging.getLogger('fsyncsrvr.SyncDaemon').addHandler(handler)
Example #21
    @defer.inlineCallbacks
    def setUp(self):
        """Set up."""
        yield super(BaseFSMonitorTestCase, self).setUp()
        fsmdir = self.mktemp('fsmdir')
        partials_dir = self.mktemp('partials_dir')
        self.root_dir = self.mktemp('root_dir')
        self.vm = FakeVolumeManager(self.root_dir)
        self.tritcask_dir = self.mktemp("tritcask_dir")
        self.db = Tritcask(self.tritcask_dir)
        self.addCleanup(self.db.shutdown)
        self.fs = filesystem_manager.FileSystemManager(fsmdir, partials_dir,
                                                       self.vm, self.db)
        self.fs.create(path=self.root_dir, share_id='', is_dir=True)
        self.fs.set_by_path(path=self.root_dir,
                            local_hash=None, server_hash=None)
        eq = event_queue.EventQueue(self.fs)

        self.deferred = deferred = defer.Deferred()

        class HitMe(object):
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def handle_default(innerself, event, **args):
                deferred.callback(True)

        eq.subscribe(HitMe())
        self.monitor = eq.monitor
        self.addCleanup(self.monitor.shutdown)
        self.log_handler = MementoHandler()
        self.log_handler.setLevel(logging.DEBUG)
        self.monitor.log.addHandler(self.log_handler)
        self.addCleanup(self.monitor.log.removeHandler, self.log_handler)
Example #22
 def configure_logging(self):
     """Configure logging for the tests."""
     logger = logging.getLogger("ssl_proxy")
     logger.setLevel(logging.DEBUG)
     logger.propagate = False
     self.handler = MementoHandler()
     logger.addHandler(self.handler)
     self.addCleanup(logger.removeHandler, self.handler)
Example #23
 @defer.inlineCallbacks
 def setUp(self):
     """Set up."""
     yield super(OSWrapperTests, self).setUp()
     self.handler = MementoHandler()
     self.handler.setLevel(logging.DEBUG)
     self._logger = logging.getLogger('ubuntuone.SyncDaemon')
     self._logger.addHandler(self.handler)
     self.addCleanup(self._logger.removeHandler, self.handler)
Example #24
    def test_gsettings_cannot_parse(self):
        """Some weird setting that cannot be parsed is logged with warning."""
        memento = MementoHandler()
        memento.setLevel(logging.DEBUG)
        gsettings.logger.addHandler(memento)
        self.addCleanup(gsettings.logger.removeHandler, memento)

        troublesome_value = "#bang"
        template_values = dict(BASE_GSETTINGS_VALUES)
        template_values["ignore_hosts"] = troublesome_value
        fake_output = TEMPLATE_GSETTINGS_OUTPUT.format(**template_values)
        self.patch(gsettings.subprocess, "check_output",
                   lambda _: fake_output)
        ps = gsettings.get_proxy_settings()
        self.assertTrue(memento.check_warning(gsettings.CANNOT_PARSE_WARNING %
                                              troublesome_value))
        self.assertEqual(ps, {})
Example #25
    @defer.inlineCallbacks
    def test_called_back_log_ok(self):
        """Test that the hasher produces correct info."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
                d.callback(kwargs)
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)

        # log config
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        hasher.logger.addHandler(handler)

        # send what to hash
        testfile = os.path.join(self.test_dir, "testfile")
        with open_file(testfile, "wb") as fh:
            fh.write("foobar")
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        # wait event and stop hasher
        yield d
        hasher.stop()
        hasher.join(timeout=5)

        # check log
        log_msg = [r.message for r in handler.records
                   if "path hash pushed" in r.message][0]
        self.assertTrue("path" in log_msg)
        self.assertTrue("hash" in log_msg)
        self.assertTrue("crc" in log_msg)
        self.assertTrue("size" in log_msg)
        self.assertTrue("st_ino" in log_msg)
        self.assertTrue("st_size" in log_msg)
        self.assertTrue("st_mtime" in log_msg)
        hasher.logger.removeHandler(handler)
Example #26
 def setUp(self):
     """Set up the test."""
     self.qc = QueueContent(home='/')
     self.handler = MementoHandler()
     self.handler.setLevel(logging.DEBUG)
     logger = logging.getLogger('magicicada.queue_content')
     logger.addHandler(self.handler)
     logger.setLevel(logging.DEBUG)
     self.addCleanup(logger.removeHandler, self.handler)
Example #27
 @defer.inlineCallbacks
 def setUp(self):
     yield super(ClientDummyAuthTests, self).setUp()
     self.creds = "open sesame"
     self.bad_creds = "not my secret"
     self.handler = MementoHandler()
     logger = logging.getLogger("storage.server")
     logger.addHandler(self.handler)
     self.addCleanup(logger.removeHandler, self.handler)
     self.handler.setLevel(logging.DEBUG)
Example #28
 @defer.inlineCallbacks
 def setUp(self):
     yield super(HeartbeatWriterTest, self).setUp()
     self.logger = logging.Logger("HeartbeatWriter.test")
     self.handler = MementoHandler()
     self.logger.addHandler(self.handler)
     self.addCleanup(self.logger.removeHandler, self.handler)
     self.clock = task.Clock()
     self.hw = HeartbeatWriter(self.interval, self.logger,
                               reactor=self.clock)
Example #29
    @defer.inlineCallbacks
    def setUp(self):
        yield super(SignalBroadcasterTestCase, self).setUp()
        self.client = FakeRemoteClient()
        self.sb = ipc.SignalBroadcaster()

        self.memento = MementoHandler()
        ipc.logger.addHandler(self.memento)
        ipc.logger.setLevel(logging.DEBUG)
        self.addCleanup(ipc.logger.removeHandler, self.memento)
Example #30
    def test_logs(self):
        """Unhandled exceptions logs in error."""
        # set up logger
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        l = logging.getLogger('magicicada')

        # call
        l.addHandler(handler)
        self.addCleanup(l.removeHandler, handler)
        exc = self._get_exception_data()
        try:
            exception_handler(*exc)
        finally:
            l.removeHandler(handler)

        # check
        self.assertTrue(handler.check_error("Unhandled exception",
                                            "ZeroDivisionError"))
Example #31
    @defer.inlineCallbacks
    def setUp(self):
        """Initialize this testcase."""
        yield super(LogginTestCase, self).setUp()
        self.memento = MementoHandler()
        restful.logger.addHandler(self.memento)
        restful.logger.setLevel(logging.DEBUG)
        self.addCleanup(restful.logger.removeHandler, self.memento)

        self.rc = restful.RestfulClient(SAMPLE_SERVICE_IRI)
        self.addCleanup(self.rc.shutdown)
Example #32
    @defer.inlineCallbacks
    def setUp(self):
        """Set up."""
        yield super(LoggingTests, self).setUp()
        self.plt = PathLockingTree()

        self.handler = MementoHandler()
        self.plt.logger.setLevel(logging.DEBUG)
        self.plt.logger.propagate = False
        self.plt.logger.addHandler(self.handler)
        self.addCleanup(self.plt.logger.removeHandler, self.handler)
Example #33
class BaseTestCase(DBusTestCase):
    """Base test case."""

    timeout = 8
    app_name = APP_NAME
    error_dict = None

    @inlineCallbacks
    def setUp(self):
        yield super(BaseTestCase, self).setUp()
        FakedSSOService.app_name = self.app_name
        FakedSSOService.error_dict = self.error_dict

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        logger.addHandler(self.memento)

        self.sso_server = self.register_server(ubuntu_sso.DBUS_BUS_NAME,
                                ubuntu_sso.DBUS_CREDENTIALS_PATH,
                                FakedSSOService)  # faked SSO server
        self.args = {'window_id': '803'}

    def register_server(self, bus_name, object_path, service_class):
        """Register a service on the session bus."""
        name = self.bus.request_name(bus_name, dbus.bus.NAME_FLAG_DO_NOT_QUEUE)
        self.assertNotEqual(name, dbus.bus.REQUEST_NAME_REPLY_EXISTS,
                            'Service %s should not be running.' % bus_name)
        mock = service_class(object_path=object_path, conn=self.bus)
        self.addCleanup(mock.remove_from_connection)
        self.addCleanup(self.bus.release_name, bus_name)

        return mock

    def get_proxy(self, bus_name, object_path, dbus_interface):
        obj = self.bus.get_object(bus_name=bus_name, object_path=object_path,
                                  follow_name_owner_changes=True)
        proxy = dbus.Interface(object=obj, dbus_interface=dbus_interface)
        return proxy

    def get_sso_proxy(self):
        return self.get_proxy(bus_name=ubuntu_sso.DBUS_BUS_NAME,
                              object_path=ubuntu_sso.DBUS_CREDENTIALS_PATH,
                              dbus_interface=ubuntu_sso.DBUS_CREDENTIALS_IFACE)
Example #34
 def setUp(self):
     super(RestHelperTestCase, self).setUp()
     self.handler = MementoHandler()
     self.user = self.obj_factory.make_user(
         1, "bob", "bobby boo", 2 * (2 ** 30))
     self.mapper = ResourceMapper()
     logger = logging.getLogger("test")
     logger.addHandler(self.handler)
     logger.setLevel(logging.INFO)
     logger.propagate = False
     self.helper = RestHelper(self.mapper, logger=logger)
Example #35
 def setUp(self):
     """Set up."""
     self.handler = MementoHandler()
     self.handler.setLevel(logging.DEBUG)
     logger = logging.getLogger("ubuntuone.SyncDaemon.OffloadQueue")
     logger.setLevel(logging.DEBUG)
     logger.addHandler(self.handler)
     self.addCleanup(logger.removeHandler, self.handler)
     self.oq = OffloadQueue()
     self.addCleanup(self.oq._tempfile.close)
     return super(OffloadQueueTestCase, self).setUp()
Example #36
    @defer.inlineCallbacks
    def setUp(self):
        yield super(GetProjectDirTestCase, self).setUp()
        self._constants = sys.modules.get(CONSTANTS_MODULE, NOT_DEFINED)
        sys.modules[CONSTANTS_MODULE] = None  # force ImportError

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        utils.logger.addHandler(self.memento)
        self.addCleanup(utils.logger.removeHandler, self.memento)

        self.get_dir = getattr(utils, self.DIR_GETTER)
Example #37
class BaseFSMonitorTestCase(testcase.BaseTwistedTestCase):
    """Test the structures where we have the path/watch."""

    timeout = 3

    @defer.inlineCallbacks
    def setUp(self):
        """Set up."""
        yield super(BaseFSMonitorTestCase, self).setUp()
        fsmdir = self.mktemp('fsmdir')
        partials_dir = self.mktemp('partials_dir')
        self.root_dir = self.mktemp('root_dir')
        self.vm = testcase.FakeVolumeManager(self.root_dir)
        self.tritcask_dir = self.mktemp("tritcask_dir")
        self.db = Tritcask(self.tritcask_dir)
        self.fs = filesystem_manager.FileSystemManager(fsmdir, partials_dir,
                                                       self.vm, self.db)
        self.fs.create(path=self.root_dir, share_id='', is_dir=True)
        self.fs.set_by_path(path=self.root_dir,
                            local_hash=None, server_hash=None)
        eq = event_queue.EventQueue(self.fs)

        self.deferred = deferred = defer.Deferred()

        class HitMe(object):
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def handle_default(innerself, event, **args):
                reactor.callLater(.1, deferred.callback, True)

        eq.subscribe(HitMe())
        self.monitor = eq.monitor
        self.log_handler = MementoHandler()
        self.log_handler.setLevel(logging.DEBUG)
        self.monitor.log.addHandler(self.log_handler)

    @defer.inlineCallbacks
    def tearDown(self):
        """Clean up the tests."""
        self.monitor.shutdown()
        self.monitor.log.removeHandler(self.log_handler)
        yield super(BaseFSMonitorTestCase, self).tearDown()
Example #38
 @defer.inlineCallbacks
 def setUp(self, test_dir_name=None, test_file_name=None,
           valid_file_path_builder=None):
     """Set up."""
     yield super(OSWrapperTests, self).setUp(
         test_dir_name=test_dir_name, test_file_name=test_file_name,
         valid_file_path_builder=None)
     self.handler = MementoHandler()
     self.handler.setLevel(logging.DEBUG)
     self._logger = logging.getLogger('ubuntuone.SyncDaemon')
     self._logger.addHandler(self.handler)
     self.addCleanup(self._logger.removeHandler, self.handler)
     self.patch(darwin.shutil, "move", self._fake_move)
Example #39
class LogginTestCase(BaseTestCase):
    """Ensure that proper debug logging is done."""

    @defer.inlineCallbacks
    def setUp(self):
        """Initialize this testcase."""
        yield super(LogginTestCase, self).setUp()
        self.memento = MementoHandler()
        restful.logger.addHandler(self.memento)
        restful.logger.setLevel(logging.DEBUG)
        self.addCleanup(restful.logger.removeHandler, self.memento)

        self.rc = restful.RestfulClient(SAMPLE_SERVICE_IRI)
        self.addCleanup(self.rc.shutdown)

    @defer.inlineCallbacks
    def test_log_rest_call(self):
        """Check that proper DEBUG is made for every REST call."""
        yield self.rc.restcall(SAMPLE_OPERATION, **SAMPLE_ARGS)

        expected_msgs = (
            SAMPLE_SERVICE_IRI + SAMPLE_NAMESPACE,
        )
        self.assertTrue(self.memento.check_debug(*expected_msgs))

    @defer.inlineCallbacks
    def test_log_json_loads_exception(self):
        """Check that json load errors are properly logged."""
        invalid_json = 'NOTAVALIDJSON'
        self.patch(self.wc, 'return_value', invalid_json)
        yield self.assertFailure(self.rc.restcall(SAMPLE_OPERATION),
                                 ValueError)

        self.memento.debug = True
        expected_msgs = (
            ValueError,
            'Can not load json from REST request response',
            invalid_json
        )
        self.assertTrue(self.memento.check_exception(*expected_msgs))
Example #40
 def setUp(self):
     super(HeartbeatListenerTestCase, self).setUp()
     self.stdin = StringIO()
     self.stdout = StringIO()
     self.stderr = StringIO()
     self.mocker = Mocker()
     self.rpc = self.mocker.mock()
     self.listener = HeartbeatListener(1,
                                       10, ['foo'], [],
                                       self.rpc,
                                       stdin=self.stdin,
                                       stdout=self.stdout,
                                       stderr=self.stderr)
     self.next_fail = {}
     self.handler = MementoHandler()
     self.listener.logger.addHandler(self.handler)
     self.listener.logger.setLevel(logging.DEBUG)
     self.handler.setLevel(logging.DEBUG)
     self.listener.logger.propagate = False
     self.processes = [
         dict(name="heartbeat", group="heartbeat", pid="101", state=RUNNING)
     ]
     self.handler.debug = True
Example #41
    def test__check_stores_and_invalidate(self):
        """Test _check_stores_and_invalidate invalidate case."""
        from backends.filesync.data.services import make_storage_user
        from backends.filesync.data.model import StorageObject

        logger = logging.getLogger('storage.server.noninv')

        h = MementoHandler()
        logger.addHandler(h)

        make_storage_user(1, u'foo', u'foo', 10000, u'shard2')
        sto = get_store('shard2', storage_zstorm)
        self._sto = sto  # for later cleanup
        obj = StorageObject(1, u'foo', u'File')
        sto.add(obj)
        sto.flush()
        self.assertFalse(obj.__storm_object_info__.get("invalidated", False))
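        # invalidating the stores must flag the object and log it as an argument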
        _check_stores_and_invalidate(storage_zstorm)
        self.assertTrue(obj.__storm_object_info__.get("invalidated", False))
        self.assertEqual(1, len(h.records))
        self.assertEqual((obj, ), h.records[0].args)
Example #42
class StatsWorkerTestCase(TestCase):
    """Tests for StatsWorker class."""
    def setUp(self):
        super(StatsWorkerTestCase, self).setUp()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.worker = stats_worker.StatsWorker(10, '', self.rpc)

        # logging setup
        self.handler = MementoHandler()
        self.worker.logger.addHandler(self.handler)
        self.addCleanup(self.worker.logger.removeHandler, self.handler)
        self.worker.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.worker.logger.propagate = False
        self.handler.debug = True

    def test_collect_stats(self):
        """Test the collect_stats method."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        processes = [dict(name="bar", group="foo", pid="42", state=RUNNING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine', ('proc', 42, 'bar')])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(
            self.handler.check_info("Collecting stats for proc", "pid=42",
                                    "name=bar"))

    def test_collect_stats_not_running(self):
        """Test the collect_stats method if the proccess isn't running."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        processes = [dict(name="bar", group="foo", pid="42", state=STARTING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(
            self.handler.check_info("Ignoring process", "pid=42", "name=bar",
                                    "state=%s" % STARTING))

    def test_collect_stats_no_data(self):
        """Test the collect_stats method with no data of a process."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))

    def test_collect_process_info_new_report(self):
        """Check how the process info is collected first time."""
        mocker = Mocker()
        assert not self.worker.process_cache

        # patch Process to return our mock for test pid
        Process = mocker.mock()
        self.patch(stats_worker.psutil, 'Process', Process)
        proc = mocker.mock()
        pid = 1234
        expect(Process(pid)).result(proc)

        # patch ProcessReport to return or mock for given proc
        ProcessReport = mocker.mock()
        self.patch(stats_worker, 'ProcessReport', ProcessReport)
        proc_report = mocker.mock()
        expect(ProcessReport(proc)).result(proc_report)

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)

    def test_collect_process_info_old_report(self):
        """Check how the process info is collected when cached."""
        mocker = Mocker()

        # put it in the cache
        pid = 1234
        proc_report = mocker.mock()
        self.worker.process_cache[pid] = proc_report

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)

    def test_collect_system_info(self):
        """Check how the system info is collected."""
        mocker = Mocker()

        # change the constant to assure it's used as we want
        result1 = dict(a=3, b=5)
        result2 = dict(c=7)
        fake = (lambda: result1, lambda: result2)
        self.patch(stats_worker, 'SYSTEM_STATS', fake)

        with mocker:
            result = self.worker._collect_machine()

        should = {}
        should.update(result1)
        should.update(result2)
        self.assertEqual(result, should)

    def test_informed_metrics(self):
        """Check how stats are reported."""
        # prepare a lot of fake info that will be "collected"
        machine_info = dict(foo=3, bar=5)
        process_info = {
            1: dict(some=1234, other=4567),
            2: dict(some=9876, other=6543),
        }
        self.worker._collect_process = lambda pid, name: process_info[pid]
        self.worker._collect_machine = lambda: machine_info
        processes = [
            dict(name="proc1", group="", pid="1", state=RUNNING),
            dict(name="proc2", group="", pid="2", state=RUNNING),
        ]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)

        # patch the metric reporter to see what is sent
        reported = set()
        self.worker.metrics.gauge = lambda *a: reported.add(a)

        # what we should get is...
        should = set([
            ('foo', 3),
            ('bar', 5),
            ('some', 1234),
            ('other', 4567),
            ('some', 9876),
            ('other', 6543),
        ])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(reported, should)
Example #43
class HeartbeatListenerTestCase(TestCase):
    """Tests for HeartbeatListener class."""
    def setUp(self):
        super(HeartbeatListenerTestCase, self).setUp()
        self.stdin = StringIO()
        self.stdout = StringIO()
        self.stderr = StringIO()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.listener = HeartbeatListener(1,
                                          10, ['foo'], [],
                                          self.rpc,
                                          stdin=self.stdin,
                                          stdout=self.stdout,
                                          stderr=self.stderr)
        self.next_fail = {}
        self.handler = MementoHandler()
        self.listener.logger.addHandler(self.handler)
        self.listener.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.listener.logger.propagate = False
        self.processes = [
            dict(name="heartbeat", group="heartbeat", pid="101", state=RUNNING)
        ]
        self.handler.debug = True

    def tearDown(self):
        self.listener.logger.removeHandler(self.handler)
        self.handler.close()
        self.next_fail = None
        self.handler = None
        self.listener = None
        super(HeartbeatListenerTestCase, self).tearDown()

    def fail_next_stop(self, pname):
        """Make next stopProcess to fail."""
        expect(self.rpc.supervisor.stopProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to stop the process."))

    def fail_next_start(self, pname):
        """Make next startProcess to fail."""
        expect(self.rpc.supervisor.startProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to start the process."))

    def test_restart(self):
        """Test the restart method."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        expect(self.rpc.supervisor.startProcess("foo"))
        with self.mocker:
            self.listener.restart("foo", "testing")
        self.assertTrue(
            self.handler.check_info("Restarting foo (last "
                                    "hearbeat: testing)"))

    def test_restart_fail_stop(self):
        """Test the restart method failing to stop the process."""
        self.fail_next_stop("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = ("Failed to stop process %s (last heartbeat: %s), "
                       "exiting: %s") % \
                    ("foo", last, "<Fault 42: 'Failed to stop the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_restart_fail_start(self):
        """Test the restart method failing to start the process."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        self.fail_next_start("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = (
                    'Failed to start process %s after stopping it, exiting: %s'
                ) % ("foo", "<Fault 42: 'Failed to start the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_check_processes(self):
        """Test the check_processes method."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo", group="foo", pid="42", state=RUNNING))
        self.processes.append(
            dict(name="bar", group="bar", pid="43", state=RUNNING))
        self.listener.processes = ['bar']
        # 2 process to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 3)
        }
        self.listener.data['p-1'] = {
            'time': time.time() - (self.listener.timeout - 1)
        }
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            self.listener.check_processes()

    def test_check_processes_no_data(self):
        """Test the check_processes method with no data of a process."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo", group="foo", pid="42", state=RUNNING))
        self.processes.append(
            dict(name="bar", group="bar", pid="43", state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            # one process to restart
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_warning(
                "Restarting process foo:foo (42), as we never received a hearbeat"
                " event from it"))
        self.assertTrue(
            self.handler.check_warning(
                "Restarting process bar:bar (43), as we never received a hearbeat"
                " event from it"))

    def test_check_processes_untracked(self):
        """Test the check_processes method with a untracked proccess."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo-untracked",
                 group="untracked",
                 pid="43",
                 state=RUNNING))
        # add a new tracked process from an untracked group
        self.processes.append(
            dict(name="bar-untracked", group="bar", pid="44", state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_info(
                "Ignoring untracked:foo-untracked (43) as isn't tracked."))
        self.assertTrue(
            self.handler.check_info(
                "Ignoring bar:bar-untracked (44) as isn't tracked."))

    def test_check_processes_not_running(self):
        """Test the check_processes method if the proccess isn't running."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo",
                 group="foo",
                 pid="42",
                 state=states.ProcessStates.STARTING))
        # add a new tracked process from an untracked group
        self.processes.append(
            dict(name="bar",
                 group="bar",
                 pid="43",
                 state=states.ProcessStates.STARTING))
        self.listener.processes = ['bar']
        # 2 processes to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_info("Ignoring foo:foo (42) as isn't running."))
        self.assertTrue(
            self.handler.check_info("Ignoring bar:bar (43) as isn't running."))

    def test_handle_heartbeat(self):
        """Test handle_heartbeat method."""
        payload = {"time": time.time()}
        self.listener.handle_heartbeat('process_name', 'group_name', '42',
                                       payload)
        info = {
            "pid": "42",
            "time": payload["time"],
            "received": self.listener.data["process_name"]["received"]
        }
        self.assertEqual({"process_name": info}, self.listener.data)

    def test_handle_event(self):
        """Test handle_event method."""
        # patch handle_heartbeat
        called = []

        def handle_heartbeat(process_name, group_name, pid, payload):
            """Fake handle_heartbeat."""
            called.append((process_name, group_name, pid, payload))

        self.listener.handle_heartbeat = handle_heartbeat
        payload_dict = {u"time": time.time(), "type": "heartbeat"}
        raw_data = ("processname:ticker groupname:ticker pid:42\n" +
                    json.dumps(payload_dict))
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        headers = childutils.get_headers(raw_header)
        self.listener._handle_event()
        # check
        self.assertEqual(1, len(called))
        del payload_dict['type']
        self.assertEqual(('ticker', 'ticker', '42', payload_dict), called[0])
        self.assertTrue(
            self.handler.check_debug("Event '%s' received: %r" %
                                     (headers['eventname'], raw_data)))
        # check the stdout info
        self.assertEqual(["READY", "RESULT 2", "OK"],
                         self.stdout.getvalue().split("\n"))

    def test_invalid_event_type(self):
        """Test with an invalid type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_error("Unable to handle event type '%s' - %r" %
                                     ('ping', raw_data)))

    def test_invalid_payload(self):
        """Test with an invalid payload."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict) + "<!foo>"
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_error("Unable to handle event type '%s' - %r" %
                                     ('None', raw_data)))

    def test_unhandled_event(self):
        """A unhandled event type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
            "poolserial:1 eventname:UNKNOWN len:%s\n" % len(raw_data)
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_warning("Received unsupported event: %s - %r" %
                                       ('UNKNOWN', raw_data)))

    def test_check_interval(self):
        """Check that we properly check on the specified interval."""
        header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
                 "poolserial:1 eventname:TICK_5 len:0\n"
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        self.stdin.write(header)
        self.stdin.seek(0)
        self.listener._handle_event()
        self.assertEqual(self.listener.tick_count, 1)
        self.stdin.seek(0)
        with self.mocker:
            self.listener._handle_event()
Example #44
class RestHelperTestCase(StorageDALTestCase):
    """Test the resthelper."""
    def setUp(self):
        super(RestHelperTestCase, self).setUp()
        self.handler = MementoHandler()
        self.user = self.obj_factory.make_user(1, u"bob", u"bobby boo",
                                               2 * (2**30))
        self.mapper = ResourceMapper()
        logger = logging.getLogger("test")
        logger.addHandler(self.handler)
        logger.setLevel(logging.INFO)
        logger.propagate = False
        self.helper = RestHelper(self.mapper, logger=logger)
        self.store = self.get_shard_store(self.user.shard_id)

    def test_GET_user(self):
        """Test for dao to REST conversion of user"""
        info = self.helper.get_user(self.user)
        self.assertEqual(
            info, self.mapper.user_repr(self.user, self.user.get_quota()))
        user_id = repr(self.user.id)
        self.assertTrue(self.handler.check_info("get_quota", user_id))
        self.assertTrue(self.handler.check_info("get_udfs", user_id))

    def test_GET_user_with_udf(self):
        """Test get_user with udf."""
        udf = self.user.make_udf(u"~/Documents")
        info = self.helper.get_user(self.user)
        self.assertEqual(
            info, self.mapper.user_repr(self.user, self.user.get_quota(),
                                        [udf]))

    def test_GET_volume(self):
        """Test get_volume."""
        volume_path = u"~/Documents"
        udf = self.user.make_udf(volume_path)
        info = self.helper.get_volume(user=self.user, volume_path=volume_path)
        self.assertEqual(info, self.mapper.volume_repr(udf))
        ids = [repr(x) for x in [self.user.id, unicode(volume_path)]]
        self.assertTrue(self.handler.check_info("get_udf_by_path", *ids))

    def test_GET_volume_with_delta0(self):
        """Test get_volume with delta, no nodes"""
        volume_path = u"~/Documents"
        udf = self.user.make_udf(volume_path)
        info = self.helper.get_volume(user=self.user,
                                      volume_path=volume_path,
                                      from_generation=0)
        self.assertEqual(
            info,
            self.mapper.volume_repr(volume=udf, from_generation=0, nodes=[]))
        ids = [repr(x) for x in [self.user.id, unicode(volume_path)]]
        self.assertTrue(self.handler.check_info("get_udf_by_path", *ids))
        ids = [repr(x) for x in [self.user.id, udf.id, 0]]
        self.assertTrue(self.handler.check_info("get_delta", *ids))

    def test_GET_volume_with_delta1(self):
        """Test get_volume with delta, with nodes"""
        volume_path = u"~/Documents"
        self.user.make_udf(volume_path)
        node0 = self.user.make_file_by_path(u"~/Documents/file0.txt")
        node1 = self.user.make_file_by_path(u"~/Documents/file1.txt")
        info = self.helper.get_volume(user=self.user,
                                      volume_path=volume_path,
                                      from_generation=0)
        udf = self.user.get_udf_by_path(u'~/Documents')
        self.assertEqual(
            info,
            self.mapper.volume_repr(volume=udf,
                                    from_generation=0,
                                    nodes=[node0, node1]))
        node0.delete()
        info = self.helper.get_volume(user=self.user,
                                      volume_path=volume_path,
                                      from_generation=0)
        self.assertEqual(info['delta']['nodes'][1]['is_live'], False)

    def test_PUT_volume(self):
        """Test put volume."""
        path = u"~/Documents"
        info = self.helper.put_volume(user=self.user, path=path)
        udf = self.user.get_udf_by_path(path)
        self.assertEquals(self.mapper.volume_repr(udf), info)
        ids = [repr(x) for x in [self.user.id, unicode(path)]]
        self.assertTrue(self.handler.check_info("make_udf", *ids))

    def test_GET_node_directory(self):
        """Test for get_node a directory node."""
        root = self.user.volume().get_root()
        d1 = root.make_subdirectory(u"dir1")
        full_path = u"~/Ubuntu One" + d1.full_path
        info = self.helper.get_node(user=self.user, node_path=full_path)
        self.assertEquals(info, self.mapper.node_repr(d1))

    def test_GET_node_file(self):
        """Test for  get_node conversion of a file node."""
        root = self.user.volume().get_root()
        f1 = root.make_file(u"file.txt")
        volume_path = u"~/Ubuntu One"
        full_path = volume_path + f1.full_path
        info = self.helper.get_node(user=self.user, node_path=full_path)
        self.assertEquals(info, self.mapper.node_repr(f1))
        ids = [repr(x) for x in [self.user.id, full_path, True]]
        self.assertTrue(self.handler.check_info("get_node_by_path", *ids))

    def test_GET_volumes(self):
        """Test get_volume."""
        udfs = [self.user.make_udf(u"~/Udf%s" % i) for i in range(10)]
        info = self.helper.get_volumes(self.user)
        root = self.user.volume().get_volume()
        expected_repr = [self.mapper.volume_repr(root)]
        expected_repr.extend([self.mapper.volume_repr(u) for u in udfs])
        info.sort(key=operator.itemgetter('path'))
        expected_repr.sort(key=operator.itemgetter('path'))
        self.assertEquals(info, expected_repr)
        self.assertTrue(
            self.handler.check_info("get_volume", repr(self.user.id)))
        self.assertTrue(self.handler.check_info("get_udfs",
                                                repr(self.user.id)))

    def test_DELETE_volume(self):
        """Test delete_volume."""
        udf = self.user.make_udf(u"~/Documents")
        self.helper.delete_volume(self.user, udf.path)
        self.assertRaises(errors.DoesNotExist, self.user.get_udf, udf.id)
        ids = [repr(x) for x in [self.user.id, udf.path]]
        self.assertTrue(self.handler.check_info("get_udf_by_path", *ids))
        ids = [repr(x) for x in [self.user.id, udf.id]]
        self.assertTrue(self.handler.check_info("delete_udf", *ids))

    def test_GET_node0(self):
        """Test simple node info."""
        root = self.user.volume().get_root()
        f1 = root.make_file(u"file.txt")
        full_path = u"~/Ubuntu One" + f1.full_path
        info = self.helper.get_node(self.user, full_path)
        self.assertEqual(info, self.mapper.node_repr(f1))

    def test_GET_node1(self):
        """Test child node info."""
        root = self.user.volume().get_root()
        d1 = root.make_subdirectory(u"Documents")
        f1 = d1.make_file(u"file.txt")
        full_path = u"~/Ubuntu One" + os.path.join(d1.full_path, f1.name)
        info = self.helper.get_node(self.user, full_path)
        self.assertEquals(info['key'], f1.nodekey)
        self.assertEquals(info['path'], f1.full_path)

    def test_GET_node2(self):
        """Test simple udf node info."""
        self.user.make_udf(u"~/Documents")
        udf = self.user.get_node_by_path(u"~/Documents")
        f1 = udf.make_file(u"file.txt")
        full_path = u"~/Documents" + f1.full_path
        info = self.helper.get_node(self.user, full_path)
        self.assertEquals(info['key'], f1.nodekey)
        self.assertEquals(info['path'], f1.full_path)

    def test_GET_node3(self):
        """Test child udf node info."""
        self.user.make_udf(u"~/Documents")
        udf = self.user.get_node_by_path(u"~/Documents")
        d1 = udf.make_subdirectory(u"slides")
        f1 = d1.make_file(u"file.txt")
        full_path = u"~/Documents" + f1.full_path
        info = self.helper.get_node(self.user, full_path)
        self.assertEqual(info, self.mapper.node_repr(f1))

    def test_DELETE_node(self):
        """Test delete_volume."""
        root = self.user.volume().get_root()
        f1 = root.make_file(u"file.txt")
        full_path = u"~/Ubuntu One" + f1.full_path
        self.helper.delete_node(self.user, full_path)
        self.assertRaises(errors.DoesNotExist,
                          self.user.volume().get_node, f1.id)
        ids = [repr(x) for x in [self.user.id, full_path]]
        self.assertTrue(self.handler.check_info("get_node_by_path", *ids))
        ids = [repr(x) for x in [self.user.id, f1.id, True]]
        self.assertTrue(self.handler.check_info("delete", *ids))

    def test_GET_node_children(self):
        """Test get_node_children."""
        root = self.user.volume().get_root()
        files = [root.make_file(u"file%s.txt" % i) for i in range(10)]
        full_path = u"~/Ubuntu One"
        root.load()
        expected = self.mapper.node_repr(root)
        expected['children'] = [self.mapper.node_repr(n) for n in files]
        info = self.helper.get_node(self.user,
                                    full_path,
                                    include_children=True)
        self.assertEquals(info, expected)
        ids = [repr(x) for x in [self.user.id, full_path, True]]
        self.assertTrue(self.handler.check_info("get_node", *ids))
        ids = [repr(x) for x in [self.user.id, root.id, True]]
        self.assertTrue(self.handler.check_info("get_children", *ids))

    def test_GET_file_node_children(self):
        """Test get_node_children."""
        self.user.volume().root.make_file(u"file.txt")
        self.assertRaises(FileNodeHasNoChildren,
                          self.helper.get_node,
                          self.user,
                          "~/Ubuntu One/file.txt",
                          include_children=True)

    def test_PUT_node_is_public(self):
        """Test put node to make existing file public."""
        original_metrics = self.helper.metrics
        self.helper.metrics = FakeMetrics()
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        self.assertEqual(node.is_public, False)
        node_rep = self.mapper.node_repr(node)
        node_rep['is_public'] = True
        info = self.helper.put_node(self.user, new_file_path, node_rep)

        ids = [repr(x) for x in [self.user.id, new_file_path]]
        self.assertTrue(self.handler.check_info("get_node_by_path", *ids))
        ids = [repr(x) for x in [self.user.id, node.id, True]]
        self.assertTrue(self.handler.check_info("change_public_access", *ids))

        node.load()
        self.assertEqual(node.is_public, True)
        self.assertEqual(info, self.mapper.node_repr(node))
        info['is_public'] = False
        info = self.helper.put_node(self.user, new_file_path, info)
        node.load()
        self.assertEqual(node.is_public, False)
        self.assertEqual(info, self.mapper.node_repr(node))
        self.helper.metrics.make_all_assertions(
            self, 'resthelper.put_node.change_public')
        self.helper.metrics = original_metrics

    def test_GET_public_files(self):
        """Test public_files returns the list of public files."""
        self.assertEqual(self.helper.get_public_files(self.user), [])
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        self.assertEqual(node.is_public, False)
        node_rep = self.mapper.node_repr(node)
        node_rep['is_public'] = True
        info = self.helper.put_node(self.user, new_file_path, node_rep)
        self.assertEqual(self.helper.get_public_files(self.user), [info])
        self.assertTrue(
            self.handler.check_info("get_public_files", repr(self.user.id)))

    def test_PUT_node_is_public_directory(self):
        """Test put node to make existing file public."""
        dir_path = u"~/Ubuntu One/a/b/c"
        node = self.user.make_tree_by_path(dir_path)
        self.assertEqual(node.is_public, False)
        node_rep = self.mapper.node_repr(node)
        node_rep['is_public'] = True
        self.assertRaises(CannotPublishDirectory, self.helper.put_node,
                          self.user, dir_path, node_rep)

    def test_PUT_node_path(self):
        """Test put node with a new path."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        self.assertEqual(node.full_path, "/a/b/c/file.txt")
        node_rep = self.mapper.node_repr(node)
        new_path = "/a/newfile.txt"
        node_rep['path'] = new_path
        info = self.helper.put_node(self.user, new_file_path, node_rep)
        node.load()
        self.assertEqual(node.full_path, new_path)
        self.assertEqual(info, self.mapper.node_repr(node))
        ids = [repr(x) for x in [self.user.id, new_file_path]]
        self.assertTrue(self.handler.check_info("get_node_by_path", *ids))
        new_dir, new_name = os.path.split(new_path)
        ids = [repr(x) for x in [self.user.id, node.vol_id, unicode(new_dir)]]
        self.assertTrue(self.handler.check_info("get_node_by_path", *ids))
        ids = [repr(x) for x in [self.user.id, node.id, unicode(new_name)]]
        self.assertTrue(self.handler.check_info("move", *ids))

    def test_PUT_node_path_is_public(self):
        """Test put node with a new path and make it public."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        self.assertEqual(node.full_path, "/a/b/c/file.txt")
        node_rep = self.mapper.node_repr(node)
        node_rep['path'] = "/a/newfile.txt"
        node_rep['is_public'] = True
        info = self.helper.put_node(self.user, new_file_path, node_rep)
        node.load()
        self.assertEqual(node.is_public, True)
        self.assertEqual(node.full_path, "/a/newfile.txt")
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_is_public_partial(self):
        """Test put node."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        self.assertEqual(node.is_public, False)
        info = self.helper.put_node(self.user, new_file_path,
                                    {'is_public': True})
        node.load()
        self.assertEqual(node.is_public, True)
        self.assertEqual(info, self.mapper.node_repr(node))
        info = self.helper.put_node(self.user, new_file_path,
                                    {'is_public': False})
        node.load()
        self.assertEqual(node.is_public, False)
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_path_partial(self):
        """Test put node with a new path with partial info."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        info = self.helper.put_node(self.user, new_file_path,
                                    {'path': "/a/newfile.txt"})
        node.load()
        self.assertEqual(node.full_path, "/a/newfile.txt")
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_path_is_public_partial(self):
        """Test put node with a new path and is_public, with partial info."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        info = self.helper.put_node(self.user, new_file_path, {
            'path': "/a/newfile.txt",
            'is_public': True
        })
        node.load()
        self.assertEqual(node.full_path, "/a/newfile.txt")
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_do_nothing(self):
        """Test put_node with nothing to do."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        node = self.user.make_file_by_path(new_file_path)
        node_repr = self.mapper.node_repr(node)
        info = self.helper.put_node(self.user, new_file_path,
                                    dict(a=2, b='hi', c='ignored'))
        node.load()
        # here nothing is changed and the info returned
        # matches the existing node_repr
        self.assertEqual(info, node_repr)
        self.assertEqual(node_repr, self.mapper.node_repr(node))

    def test_PUT_node_new_file_magic(self):
        """Test put_node to make a new file with content."""
        cb = get_test_contentblob("FakeContent")
        cb.magic_hash = 'magic'
        self.store.add(cb)
        self.store.commit()
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        info = self.helper.put_node(self.user, new_file_path, {
            'kind': 'file',
            'hash': cb.hash,
            'magic_hash': 'magic'
        })
        node = self.user.get_node_by_path(new_file_path)
        self.assertEqual(node.kind, 'File')
        self.assertEqual(node.full_path, '/a/b/c/file.txt')
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_update_file_magic(self):
        """Test put_node to make a new file with content."""
        cb = get_test_contentblob("FakeContent")
        cb.magic_hash = 'magic'
        self.store.add(cb)
        self.store.commit()
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        info = self.helper.put_node(self.user, new_file_path, {
            'kind': 'file',
            'hash': cb.hash,
            'magic_hash': 'magic'
        })
        cb = get_test_contentblob("NewFakeContent")
        cb.magic_hash = 'magic2'
        self.store.add(cb)
        self.store.commit()
        info = self.helper.put_node(self.user, new_file_path, {
            'kind': 'file',
            'hash': cb.hash,
            'magic_hash': 'magic2'
        })
        node = self.user.get_node_by_path(new_file_path, with_content=True)
        self.assertEqual(node.kind, 'File')
        self.assertEqual(node.full_path, '/a/b/c/file.txt')
        self.assertEqual(info, self.mapper.node_repr(node))
        self.assertEqual(node.content.magic_hash, 'magic2')

    def test_PUT_node_new_file(self):
        """Test put_node to make a new file."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        info = self.helper.put_node(self.user, new_file_path, {'kind': 'file'})
        node = self.user.get_node_by_path(new_file_path)
        self.assertEqual(node.kind, 'File')
        self.assertEqual(node.full_path, '/a/b/c/file.txt')
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_new_directory(self):
        """Test put_node to make a new directory."""
        new_file_path = u"~/Ubuntu One/a/b/c/file.txt"
        info = self.helper.put_node(self.user, new_file_path,
                                    {'kind': 'directory'})
        node = self.user.get_node_by_path(new_file_path)
        self.assertEqual(node.kind, 'Directory')
        self.assertEqual(node.full_path, '/a/b/c/file.txt')
        self.assertEqual(info, self.mapper.node_repr(node))

    def test_PUT_node_exceptions(self):
        """Test put_node exceptions."""
        self.assertRaises(InvalidKind, self.helper.put_node, self.user,
                          "~/Ubuntu one/x", {"kind": "ABC"})
        # PUT to a path on a non-existent volume.
        self.assertRaises(errors.DoesNotExist, self.helper.put_node, self.user,
                          "~/Ubuntu/x", {})
        # PUT to a non-existent node.
        self.assertRaises(errors.DoesNotExist, self.helper.put_node, self.user,
                          "~/Ubuntu One/x", {})
Пример #45
0
class ClientDummyAuthTests(AuthenticationBaseTestCase):
    """Client authentication tests using the dummy auth provider."""

    auth_provider_class = DummyAuthProvider

    @defer.inlineCallbacks
    def setUp(self):
        yield super(ClientDummyAuthTests, self).setUp()
        self.creds = 'open sesame'
        self.bad_creds = 'not my secret'
        self.handler = MementoHandler()
        logger = logging.getLogger('storage.server')
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)
        self.handler.setLevel(logging.DEBUG)

    def assert_auth_ok_logging(self):
        self.assertTrue(
            self.handler.check_debug("authenticated user", "OK",
                                     self.usr0.username))
        self.assertFalse(self.handler.check_warning("missing user"))

    def assert_auth_ok_missing_user(self):
        self.assertTrue(
            self.handler.check_debug("missing user", "(id=%s)" % self.usr0.id))
        self.assertFalse(self.handler.check_info("authenticated user"))

    @defer.inlineCallbacks
    def test_auth_ok_user_ok(self):
        """Correct authentication must succeed."""
        yield self.callback_test(self.do_auth,
                                 credentials=self.creds,
                                 add_default_callbacks=True)
        self.assert_auth_ok_logging()

    @defer.inlineCallbacks
    def test_auth_ok_bad_user(self):
        """Non existing user must fail authentication."""
        # make the user getter fail
        self.patch(self.service.factory.content, 'get_user_by_id',
                   lambda *a, **k: defer.fail(DoesNotExist()))

        d = self.callback_test(self.do_auth,
                               credentials=self.creds,
                               add_default_callbacks=True)
        yield self.assertFailure(d, protocol_errors.AuthenticationFailedError)

        self.assert_auth_ok_missing_user()

    @defer.inlineCallbacks
    def test_auth_ok_with_session_id(self):
        """Correct authentication must succeed and include the session_id."""
        auth_request = yield self.callback_test(self.do_auth,
                                                credentials=self.creds,
                                                add_default_callbacks=True)

        protocol = self.service.factory.protocols[0]
        self.assertEqual(auth_request.session_id, str(protocol.session_id))

    @defer.inlineCallbacks
    def test_auth_ok_with_metadata(self):
        """Correct authentication must succeed and include metadata."""
        m_called = []
        self.service.factory.metrics.meter = lambda *a: m_called.append(a)

        metadata = {u"platform": u"linux2", u"version": u"1.0", u"foo": u"bar"}
        yield self.callback_test(self.do_auth,
                                 credentials=self.creds,
                                 metadata=metadata,
                                 add_default_callbacks=True)

        self.assertTrue(
            self.handler.check_info("Client metadata: %s" % metadata))
        self.assertIn(("client.platform.linux2", 1), m_called)
        self.assertIn(("client.version.1_0", 1), m_called)
        self.assertNotIn(("client.foo.bar", 1), m_called)

    def test_auth_fail(self):
        """Wrong secret must fail."""
        def test(client, **kwargs):
            d = self.do_auth(client, credentials=self.bad_creds)
            d.addCallbacks(
                lambda _: client.test_fail(Exception("Should not succeed.")),
                lambda _: client.test_done("ok"))

        return self.callback_test(test)

    def test_get_root(self):
        """Must receive the root after authentication."""
        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()
            self.assertIsNotNone(root_id)

        return self.callback_test(test, add_default_callbacks=True)

    def test_get_root_twice(self):
        """Get root must keep the root id."""
        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id1 = yield client.get_root()
            root_id2 = yield client.get_root()
            self.assertEqual(root_id1, root_id2)

        return self.callback_test(test, add_default_callbacks=True)

    def test_user_becomes_inactive(self):
        """After StorageUser authentication ok it becomes inactive."""
        @defer.inlineCallbacks
        def test(client):
            """Test."""
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()

            # create one file, should be ok
            yield client.make_file(request.ROOT, root_id, "f1")

            # cancel user subscription, so it needs
            # to get it again from the DB
            self.usr0.update(subscription=False)

            # create second file, should NOT be ok
            try:
                yield client.make_file(request.ROOT, root_id, "f2")
            except protocol_errors.DoesNotExistError:
                pass  # failed as we expected
            else:
                client.test_fail("It should have failed!")

        return self.callback_test(test, add_default_callbacks=True)
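
# test_auth_ok_with_metadata above pins down how client metadata is expected
# to be reported: the raw dict is logged, only known keys become meters, and
# dots in values are replaced so "1.0" turns into "client.version.1_0".  A
# minimal sketch of a compatible reporting helper (an assumption, not the
# server code; the whitelist and meter call shape are hypothetical):
REPORTED_METADATA_KEYS = ('platform', 'version')


def report_client_metadata(metrics, logger, metadata):
    """Log the raw metadata and meter the whitelisted keys."""
    logger.info("Client metadata: %s" % (metadata,))
    for key in REPORTED_METADATA_KEYS:
        if key in metadata:
            value = metadata[key].replace('.', '_')
            metrics.meter("client.%s.%s" % (key, value), 1)
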
Пример #46
0
class HeartbeatWriterTest(TwistedTestCase):
    """Tests for HeartbeatWriter."""

    interval = 5

    @defer.inlineCallbacks
    def setUp(self):
        yield super(HeartbeatWriterTest, self).setUp()
        self.logger = logging.Logger("HeartbeatWriter.test")
        self.handler = MementoHandler()
        self.logger.addHandler(self.handler)
        self.addCleanup(self.logger.removeHandler, self.handler)
        self.clock = task.Clock()
        self.hw = HeartbeatWriter(self.interval,
                                  self.logger,
                                  reactor=self.clock)

    def test_send_no_transport(self):
        """Log a warning when there is no transport."""
        self.hw.send()
        self.assertTrue(
            self.handler.check_warning(
                "Can't send heartbeat without a transport"))

    def test_send_loop(self):
        """Send heartbeats in the LoopingCall."""
        # first connect to something
        transport = StringIO()
        self.clock.advance(2)
        self.hw.makeConnection(transport)
        self.clock.advance(5)
        self.clock.advance(5)
        # we should have 3 heartbeats in the transport, get them
        raw_events = transport.getvalue().split(BEGIN_TOKEN, 3)
        events = []
        for raw_event in raw_events:
            if raw_event:
                events.append(json.loads(raw_event.strip(END_TOKEN)))
        # check each heartbeat's type and timestamp
        for i, timestamp in [(0, 2), (1, 7), (2, 12)]:
            self.assertEqual(events[i]['type'], "heartbeat")
            self.assertEqual(events[i]['time'], timestamp)

    def test_send_on_connectionMade(self):
        """On connectionMade start the loop and send."""
        # first connect to something
        transport = StringIO()
        self.clock.advance(0.1)
        self.hw.makeConnection(transport)
        self.assertTrue(self.hw.loop.running)
        raw_event = transport.getvalue()
        self.assertTrue(raw_event.startswith(BEGIN_TOKEN))
        self.assertTrue(raw_event.endswith(END_TOKEN))
        # strip the tokens
        payload = json.loads(raw_event.strip(BEGIN_TOKEN).strip(END_TOKEN))
        self.assertEqual(payload['type'], "heartbeat")
        self.assertEqual(payload['time'], self.clock.seconds())

    def test_connectionLost(self):
        """On connectionLost cleanup everything."""
        self.hw.makeConnection(None)
        called = []
        self.patch(self.hw.loop, 'stop', lambda: called.append(True))
        self.hw.connectionLost(protocol.connectionDone)
        self.assertTrue(
            self.handler.check_info("HeartbeatWriter connectionLost: %s" %
                                    (protocol.connectionDone, )))
        self.assertTrue(called)
        self.assertEqual(self.hw.loop, None)
        self.assertEqual(self.hw.reactor, None)
        self.assertEqual(self.hw.logger, None)
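
# The framing these tests rely on is simple: each heartbeat is a JSON dict
# with 'type' and 'time', wrapped between BEGIN_TOKEN and END_TOKEN, written
# once on connectionMade and then every `interval` seconds by a LoopingCall.
# A minimal sketch of the send side (an assumption for illustration, not the
# HeartbeatWriter implementation itself):
import json
import time


def frame_heartbeat(begin_token, end_token, now=None):
    """Build one framed heartbeat event, ready to write to a transport."""
    if now is None:
        now = time.time()
    payload = {"type": "heartbeat", "time": now}
    return begin_token + json.dumps(payload) + end_token

# e.g. transport.write(frame_heartbeat(BEGIN_TOKEN, END_TOKEN,
#                                      now=reactor.seconds()))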