class SSLTestCase(BaseSSLTestCase):
    """Test error handling when dealing with ssl."""

    @defer.inlineCallbacks
    def setUp(self):
        """Set the diff tests."""
        yield super(SSLTestCase, self).setUp()

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        logger = webclient.webclient_module().logger
        logger.addHandler(self.memento)
        self.addCleanup(logger.removeHandler, self.memento)

        self.wc = webclient.webclient_factory()
        self.addCleanup(self.wc.shutdown)

        self.called = []

    @defer.inlineCallbacks
    def test_ssl_fail(self):
        """Test showing the dialog and rejecting."""
        yield self.assertFailure(
            self.wc.request(self.base_iri + SIMPLERESOURCE), WebClientError)
        self.assertNotEqual(None, self.memento.check_error('SSL errors'))

    if (WEBCLIENT_MODULE_NAME.endswith(".txweb") or
            WEBCLIENT_MODULE_NAME.endswith(".libsoup")):
        reason = 'SSL support has not yet been implemented.'
        test_ssl_fail.skip = reason
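        # twisted.trial honours a `skip` attribute on a test method: the test
        # is reported as skipped with the given reason instead of being run,
        # so backends without SSL support do not fail this suite.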
class BaseEQTestCase(BaseTwistedTestCase):
    """ Setup an EQ for test. """

    _monitor_class = FakeMonitor

    @defer.inlineCallbacks
    def setUp(self):
        """Setup the test."""
        yield super(BaseEQTestCase, self).setUp()
        self.fsmdir = self.mktemp('fsmdir')
        self.partials_dir = self.mktemp('partials_dir')
        self.root_dir = self.mktemp('root_dir')
        self.vm = FakeVolumeManager(self.root_dir)
        self.db = tritcask.Tritcask(self.mktemp('tritcask'))
        self.addCleanup(self.db.shutdown)
        self.fs = filesystem_manager.FileSystemManager(self.fsmdir,
                                                       self.partials_dir,
                                                       self.vm, self.db)
        self.fs.create(path=self.root_dir,
                       share_id='', is_dir=True)
        self.fs.set_by_path(path=self.root_dir,
                            local_hash=None, server_hash=None)
        self.eq = event_queue.EventQueue(self.fs,
                                         monitor_class=self._monitor_class)
        self.eq.listener_map = {}
        self.addCleanup(self.eq.shutdown)
        self.fs.register_eq(self.eq)

        # add a Memento handler to the logger
        self.log_handler = MementoHandler()
        self.log_handler.setLevel(logging.DEBUG)
        self.eq.log.addHandler(self.log_handler)
        self.addCleanup(self.eq.log.removeHandler, self.log_handler)

    def test_disconnect_with_user_locked_after_auth(self):
        """Client gets disconnected if the user is locked after auth."""
        # add the log handler
        logger = logging.getLogger('storage.server')
        hdlr = MementoHandler()
        hdlr.setLevel(logging.INFO)
        logger.addHandler(hdlr)
        self.addCleanup(logger.removeHandler, hdlr)
        # define a connectionLostHandler to know when the client
        # gets disconnected.
        d = defer.Deferred()

        def conn_lost_handler(r):
            """Connection lost!"""
            d.callback(None)

        @defer.inlineCallbacks
        def dummy(client):
            # set the connection lost handler
            client.connectionLostHandler = conn_lost_handler
            # authenticate and get the root, which should work fine
            yield client.dummy_authenticate("open sesame")
            root_id = yield client.get_root()
            # lock the user:
            usr = self.user_store.get(model.StorageUser, 0)
            usr.locked = True
            self.user_store.commit()
            client.make_dir(request.ROOT, root_id, u"open sesame")
            yield d
            # check we logged a warning about this.
            self.assertTrue(
                hdlr.check_warning("Shutting down protocol: user locked"))

        return self.callback_test(dummy, add_default_callbacks=True)
    def test_disconnect_with_user_locked_after_auth(self):
        """Client gets disconnected if the user is locked after auth."""
        # add the log handler
        logger = logging.getLogger('storage.server')
        hdlr = MementoHandler()
        hdlr.setLevel(logging.INFO)
        logger.addHandler(hdlr)
        self.addCleanup(logger.removeHandler, hdlr)
        # define a connectionLostHandler to know when the client
        # gets disconnected.
        d = defer.Deferred()

        def conn_lost_handler(r):
            """Connection lost!"""
            d.callback(None)

        @defer.inlineCallbacks
        def dummy(client):
            # set the connection lost handler
            client.connectionLostHandler = conn_lost_handler
            # authenticate and get the root, which should work fine
            yield client.dummy_authenticate("open sesame")
            root_id = yield client.get_root()
            # lock the user:
            usr = self.store.get(StorageUser, 0)
            usr.locked = True
            self.store.commit()
            client.make_dir(request.ROOT, root_id, u"open sesame")
            yield d
            # check we logged a warning about this.
            self.assertTrue(hdlr.check_warning(
                "Shutting down protocol: user locked"))
        return self.callback_test(dummy, add_default_callbacks=True)

    def test_disconnect_when_idle(self):
        """The client gets disconnected when the connection stays idle."""

        @defer.inlineCallbacks
        def auth(client):
            yield client.dummy_authenticate('open sesame')
            d = defer.Deferred()
            client.connectionLostHandler = d.callback
            # add the log handler
            logger = logging.getLogger('storage.server')
            hdlr = MementoHandler()
            hdlr.setLevel(logging.INFO)
            logger.addHandler(hdlr)
            # patch the looping ping values
            server = self.service.factory.protocols[0]
            server.ping_loop.interval = 0.1
            server.ping_loop.idle_timeout = 0.3
            # reschedule the ping loop
            server.ping_loop.reset()
            try:
                yield d
            except ConnectionDone:
                msg = "Disconnecting - idle timeout"
                self.assertTrue(hdlr.check_info(msg))
            else:
                self.fail("Should get disconnected.")
            finally:
                logger.removeHandler(hdlr)

        return self.callback_test(auth)

    def test_message(self):
        """Just a message."""
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        deferror_handler(dict(isError=True, message="foobar"))
        self.assertFalse(handler.check_error("Unhandled error in deferred",
                                             "foobar"))
class FilterTests(unittest.TestCase):
    """Tests log filters"""

    @defer.inlineCallbacks
    def setUp(self):
        """Setup the logger and the handler"""
        yield super(FilterTests, self).setUp()
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        root_logger.addHandler(self.handler)
        self.addCleanup(root_logger.removeHandler, self.handler)

        if filesystem_logger is not None:
            filesystem_logger.addHandler(self.handler)
            self.addCleanup(filesystem_logger.removeHandler, self.handler)

        twisted_logger.addHandler(self.handler)
        self.addCleanup(twisted_logger.removeHandler, self.handler)

        self.addCleanup(self.handler.close)

    @skipIfOS('win32', 'There is no filesystem_logger implementation on '
                       'Windows yet, see bug #823316.')
    def test_multiple_filters(self):
        """Tests logging with more than one filter."""
        test_logger = logging.getLogger('ubuntuone.SyncDaemon.FilterTest')
        test_logger.debug('debug info 0')
        self.assertEqual(1, len(self.handler.records))
        self.handler.addFilter(MultiFilter(
            ['ubuntuone.SyncDaemon', 'twisted', 'pyinotify']))
        test_logger.debug('debug info 1')
        self.assertEqual(2, len(self.handler.records))

    def test_failure(self):
        """Received a full failure."""
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        f = failure.Failure(ValueError('foobar'))
        deferror_handler(dict(isError=True, failure=f, message=''))
        self.assertFalse(handler.check_error("Unhandled error in deferred",
                                             "ValueError", "foobar"))
class OSWrapperTests(test_os_helper.OSWrapperTests):
    """Tests for os wrapper functions."""

    @defer.inlineCallbacks
    def setUp(self, test_dir_name=None, test_file_name=None,
              valid_file_path_builder=None):
        """Set up."""
        yield super(OSWrapperTests, self).setUp(
            test_dir_name=test_dir_name, test_file_name=test_file_name,
            valid_file_path_builder=valid_file_path_builder)
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        self._logger = logging.getLogger('ubuntuone.SyncDaemon')
        self._logger.addHandler(self.handler)
        self.addCleanup(self._logger.removeHandler, self.handler)
        self.patch(darwin.shutil, "move", self._fake_move)

    def _fake_move(*args):
        """Fake shutil move."""
        raise Exception("Fail fake move")

    def test_stat_symlink(self):
        """Test that it doesn't follow symlinks.

        We compare the inode only (enough to see if it's returning info
        from the link or the linked), as we can not compare the full stat
        because the st_mode will be different.
        """
        link = os.path.join(self.basedir, 'foo')
        os.symlink(self.testfile, link)
        self.assertNotEqual(os.stat(link).st_ino, stat_path(link).st_ino)
        self.assertEqual(os.lstat(link).st_ino, stat_path(link).st_ino)

    def test_movetotrash_file_bad(self):
        """Something bad happens when moving to trash; removed anyway."""
        path = os.path.join(self.basedir, 'foo')
        open_file(path, 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning(
            "Problems moving to trash!", "Removing anyway", "foo"))

    def test_movetotrash_file_not_exists(self):
        """Something bad happen when moving to trash, removed anyway."""
        path = os.path.join(self.basedir, 'foo2')
        self.assertFalse(os.path.exists(path))
        self.assertRaises(OSError, move_to_trash, path)

    def test_movetotrash_dir_bad(self):
        """Something bad happens when moving to trash; removed anyway."""
        path = os.path.join(self.basedir, 'foo')
        os.mkdir(path)
        open_file(os.path.join(path, 'file inside directory'), 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning(
            "Problems moving to trash!", "Removing anyway", "foo"))
class RemovableSignalTestCase(TestCase):
    """Tests for RemovableSignal."""

    @defer.inlineCallbacks
    def setUp(self):
        yield super(RemovableSignalTestCase, self).setUp()
        self.proxy = FakeSSOProxy()

    def test_creation(self):
        """When creating, bind properly to self.proxy."""
        rs = RemovableSignal(self.proxy, "test", lambda *a: None)
        self.assertIs(self.proxy.test, rs)

    def test_dunder_callable(self):
        """__call__ works as expected."""
        sample_store = []
        expected = object()
        test_cb = lambda res: sample_store.append(res)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs(APP_NAME, expected)
        self.assertEqual(sample_store, [expected])

    def test_callable_does_not_log_args(self):
        """__call__ does not log its arguments."""
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)

        secret_token = "secret token!"
        test_cb = lambda _: None
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)

        rs(APP_NAME, {"secret": secret_token})
        for record in self.handler.records:
            self.assertNotIn(secret_token, record.message)

    def test_dunder_filters_other_apps(self):
        """__call__ filters by app_name."""
        sample_store = []
        test_cb = lambda res: sample_store.append(res)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs('other app name', object())
        self.assertEqual(sample_store, [])

    def test_remove(self):
        """The signal has a .remove that removes the callback."""
        sample_store = []
        test_cb = lambda app_name, creds: sample_store.append(creds)
        rs = RemovableSignal(self.proxy, "on_credentials_found_cb", test_cb)
        rs.remove()
        rs(TEST_APP_NAME, TEST_CREDENTIALS)
        self.assertEqual(len(sample_store), 0)
class GetProjectDirTestCase(TestCase):
    """Test case for get_project_dir when constants module is not defined."""

    DIR_NAME = utils.DATA_SUFFIX
    DIR_CONSTANT = 'PROJECT_DIR'
    DIR_GETTER = 'get_project_dir'

    @defer.inlineCallbacks
    def setUp(self):
        yield super(GetProjectDirTestCase, self).setUp()
        self._constants = sys.modules.get(CONSTANTS_MODULE, NOT_DEFINED)
        sys.modules[CONSTANTS_MODULE] = None  # force ImportError

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        utils.logger.addHandler(self.memento)
        self.addCleanup(utils.logger.removeHandler, self.memento)

        self.get_dir = getattr(utils, self.DIR_GETTER)

    @defer.inlineCallbacks
    def tearDown(self):
        if self._constants is not NOT_DEFINED:
            sys.modules[CONSTANTS_MODULE] = self._constants
        else:
            sys.modules.pop(CONSTANTS_MODULE)
        yield super(GetProjectDirTestCase, self).tearDown()

    def test_get_dir_relative(self):
        """The relative path for the data directory is correctly retrieved."""
        module = utils.os.path.dirname(utils.__file__)
        rel_data = utils.os.path.join(module,
                                      utils.os.path.pardir,
                                      utils.os.path.pardir,
                                      self.DIR_NAME)
        expected_dir = utils.os.path.abspath(rel_data)

        # ensure expected_path exists at os level
        self.patch(utils.os.path, 'exists', lambda path: path == expected_dir)

        result = self.get_dir()
        self.assertEqual(expected_dir, result)

    def test_get_dir_none_exists(self):
        """No data directory exists, return None and log as error."""
        self.patch(utils.os.path, 'exists', lambda path: False)
        sys.modules[CONSTANTS_MODULE] = None

        self.assertRaises(AssertionError, self.get_dir)
        msg = 'get_dir: can not build a valid path.'
        self.assertTrue(self.memento.check_error(msg))
    def test_gsettings_cannot_parse(self):
        """Some weird setting that cannot be parsed is logged with warning."""
        memento = MementoHandler()
        memento.setLevel(logging.DEBUG)
        gsettings.logger.addHandler(memento)
        self.addCleanup(gsettings.logger.removeHandler, memento)

        troublesome_value = "#bang"
        template_values = dict(BASE_GSETTINGS_VALUES)
        template_values["ignore_hosts"] = troublesome_value
        fake_output = TEMPLATE_GSETTINGS_OUTPUT.format(**template_values)
        self.patch(gsettings.subprocess, "check_output",
                   lambda _: fake_output)
        ps = gsettings.get_proxy_settings()
        self.assertTrue(memento.check_warning(gsettings.CANNOT_PARSE_WARNING %
                                              troublesome_value))
        self.assertEqual(ps, {})
    @defer.inlineCallbacks
    def test_called_back_log_ok(self):
        """Test that the hasher produces correct info."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
                d.callback(kwargs)
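        # Helper stands in for the event queue receiver: the hasher pushes
        # its result event into it, firing `d` with the kwargs checked below.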
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)

        # log config
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        hasher.logger.addHandler(handler)

        # send what to hash
        testfile = os.path.join(self.test_dir, "testfile")
        with open_file(testfile, "wb") as fh:
            fh.write("foobar")
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        # wait event and stop hasher
        yield d
        hasher.stop()
        hasher.join(timeout=5)

        # check log
        log_msg = [r.message for r in handler.records
                   if "path hash pushed" in r.message][0]
        self.assertTrue("path" in log_msg)
        self.assertTrue("hash" in log_msg)
        self.assertTrue("crc" in log_msg)
        self.assertTrue("size" in log_msg)
        self.assertTrue("st_ino" in log_msg)
        self.assertTrue("st_size" in log_msg)
        self.assertTrue("st_mtime" in log_msg)
        hasher.logger.removeHandler(handler)
    def test_logs(self):
        """Unhandled exceptions logs in error."""
        # set up logger
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        l = logging.getLogger('magicicada')

        # call
        l.addHandler(handler)
        self.addCleanup(l.removeHandler, handler)
        exc = self._get_exception_data()
        try:
            exception_handler(*exc)
        finally:
            l.removeHandler(handler)

        # check
        self.assertTrue(handler.check_error("Unhandled exception",
                                            "ZeroDivisionError"))
class BaseTestCase(DBusTestCase):
    """Base test case."""

    timeout = 8
    app_name = APP_NAME
    error_dict = None

    @inlineCallbacks
    def setUp(self):
        yield super(BaseTestCase, self).setUp()
        FakedSSOService.app_name = self.app_name
        FakedSSOService.error_dict = self.error_dict

        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        logger.addHandler(self.memento)
        self.addCleanup(logger.removeHandler, self.memento)

        # faked SSO server
        self.sso_server = self.register_server(
            ubuntu_sso.DBUS_BUS_NAME, ubuntu_sso.DBUS_CREDENTIALS_PATH,
            FakedSSOService)
        self.args = {'window_id': '803'}

    def register_server(self, bus_name, object_path, service_class):
        """Register a service on the session bus."""
        name = self.bus.request_name(bus_name, dbus.bus.NAME_FLAG_DO_NOT_QUEUE)
        self.assertNotEqual(name, dbus.bus.REQUEST_NAME_REPLY_EXISTS,
                            'Service %s should not be running.' % bus_name)
        mock = service_class(object_path=object_path, conn=self.bus)
        self.addCleanup(mock.remove_from_connection)
        self.addCleanup(self.bus.release_name, bus_name)

        return mock

    def get_proxy(self, bus_name, object_path, dbus_interface):
        """Return a dbus proxy for the given interface of a remote object."""
        obj = self.bus.get_object(bus_name=bus_name, object_path=object_path,
                                  follow_name_owner_changes=True)
        proxy = dbus.Interface(object=obj, dbus_interface=dbus_interface)
        return proxy

    def get_sso_proxy(self):
        """Return a proxy for the faked SSO credentials service."""
        return self.get_proxy(bus_name=ubuntu_sso.DBUS_BUS_NAME,
                              object_path=ubuntu_sso.DBUS_CREDENTIALS_PATH,
                              dbus_interface=ubuntu_sso.DBUS_CREDENTIALS_IFACE)
class BaseFSMonitorTestCase(testcase.BaseTwistedTestCase):
    """Test the structures where we have the path/watch."""

    timeout = 3

    @defer.inlineCallbacks
    def setUp(self):
        """Set up."""
        yield super(BaseFSMonitorTestCase, self).setUp()
        fsmdir = self.mktemp('fsmdir')
        partials_dir = self.mktemp('partials_dir')
        self.root_dir = self.mktemp('root_dir')
        self.vm = testcase.FakeVolumeManager(self.root_dir)
        self.tritcask_dir = self.mktemp("tritcask_dir")
        self.db = Tritcask(self.tritcask_dir)
        self.addCleanup(self.db.shutdown)
        self.fs = filesystem_manager.FileSystemManager(fsmdir, partials_dir,
                                                       self.vm, self.db)
        self.fs.create(path=self.root_dir, share_id='', is_dir=True)
        self.fs.set_by_path(path=self.root_dir,
                            local_hash=None, server_hash=None)
        eq = event_queue.EventQueue(self.fs)

        self.deferred = deferred = defer.Deferred()

        class HitMe(object):
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def handle_default(innerself, event, **args):
                deferred.callback(True)

        eq.subscribe(HitMe())
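        # any event the monitor emits reaches HitMe.handle_default, firing
        # self.deferred so tests can wait for the first detected event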
        self.monitor = eq.monitor
        self.log_handler = MementoHandler()
        self.log_handler.setLevel(logging.DEBUG)
        self.monitor.log.addHandler(self.log_handler)

    @defer.inlineCallbacks
    def tearDown(self):
        """Clean up the tests."""
        self.monitor.shutdown()
        self.monitor.log.removeHandler(self.log_handler)
        yield super(BaseFSMonitorTestCase, self).tearDown()
class HeartbeatListenerTestCase(TestCase):
    """Tests for HeartbeatListener class."""
    def setUp(self):
        super(HeartbeatListenerTestCase, self).setUp()
        self.stdin = StringIO()
        self.stdout = StringIO()
        self.stderr = StringIO()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.listener = HeartbeatListener(
            1, 10, ['foo'], [], self.rpc, stdin=self.stdin,
            stdout=self.stdout, stderr=self.stderr)
        self.next_fail = {}
        self.handler = MementoHandler()
        self.listener.logger.addHandler(self.handler)
        self.listener.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.listener.logger.propagate = False
        self.processes = [
            dict(name="heartbeat", group="heartbeat", pid="101", state=RUNNING)
        ]
        self.handler.debug = True

    def tearDown(self):
        self.listener.logger.removeHandler(self.handler)
        self.handler.close()
        self.next_fail = None
        self.handler = None
        self.listener = None
        super(HeartbeatListenerTestCase, self).tearDown()

    def fail_next_stop(self, pname):
        """Make next stopProcess to fail."""
        expect(self.rpc.supervisor.stopProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to stop the process."))

    def fail_next_start(self, pname):
        """Make next startProcess to fail."""
        expect(self.rpc.supervisor.startProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to start the process."))

    def test_restart(self):
        """Test the restart method."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        expect(self.rpc.supervisor.startProcess("foo"))
        with self.mocker:
            self.listener.restart("foo", "testing")
        self.assertTrue(
            self.handler.check_info("Restarting foo (last "
                                    "hearbeat: testing)"))

    def test_restart_fail_stop(self):
        """Test the restart method failing to stop the process."""
        self.fail_next_stop("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = ("Failed to stop process %s (last heartbeat: %s), "
                       "exiting: %s") % \
                    ("foo", last, "<Fault 42: 'Failed to stop the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_restart_fail_start(self):
        """Test the restart method failing to start the process."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        self.fail_next_start("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = (
                    'Failed to start process %s after stopping it, exiting: %s'
                ) % ("foo", "<Fault 42: 'Failed to start the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_check_processes(self):
        """Test the check_processes method."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo", group="foo", pid="42", state=RUNNING))
        self.processes.append(
            dict(name="bar", group="bar", pid="43", state=RUNNING))
        self.listener.processes = ['bar']
        # 2 processes to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 3)
        }
        self.listener.data['p-1'] = {
            'time': time.time() - (self.listener.timeout - 1)
        }
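        # 'p-1' heartbeated within the timeout window, so no restart is
        # expected for it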
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            self.listener.check_processes()

    def test_check_processes_no_data(self):
        """Test the check_processes method with no data of a process."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo", group="foo", pid="42", state=RUNNING))
        self.processes.append(
            dict(name="bar", group="bar", pid="43", state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            # two processes to restart (no heartbeat data was received)
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_warning(
                "Restarting process foo:foo (42), as we never received a hearbeat"
                " event from it"))
        self.assertTrue(
            self.handler.check_warning(
                "Restarting process bar:bar (43), as we never received a hearbeat"
                " event from it"))

    def test_check_processes_untracked(self):
        """Test the check_processes method with a untracked proccess."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo-untracked",
                 group="untracked",
                 pid="43",
                 state=RUNNING))
        # add an untracked process name inside a tracked group
        self.processes.append(
            dict(name="bar-untracked", group="bar", pid="44", state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_info(
                "Ignoring untracked:foo-untracked (43) as isn't tracked."))
        self.assertTrue(
            self.handler.check_info(
                "Ignoring bar:bar-untracked (44) as isn't tracked."))

    def test_check_processes_not_running(self):
        """Test the check_processes method if the proccess isn't running."""
        # add the fake process to the process list
        self.processes.append(
            dict(name="foo",
                 group="foo",
                 pid="42",
                 state=states.ProcessStates.STARTING))
        # add another tracked process that is also not running
        self.processes.append(
            dict(name="bar",
                 group="bar",
                 pid="43",
                 state=states.ProcessStates.STARTING))
        self.listener.processes = ['bar']
        # 2 processes to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 2)
        }
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(
            self.handler.check_info("Ignoring foo:foo (42) as isn't running."))
        self.assertTrue(
            self.handler.check_info("Ignoring bar:bar (43) as isn't running."))

    def test_handle_heartbeat(self):
        """Test handle_heartbeat method."""
        payload = {"time": time.time()}
        self.listener.handle_heartbeat('process_name', 'group_name', '42',
                                       payload)
        info = {
            "pid": "42",
            "time": payload["time"],
            "received": self.listener.data["process_name"]["received"]
        }
        self.assertEqual({"process_name": info}, self.listener.data)

    def test_handle_event(self):
        """Test handle_event method."""
        # patch handle_heartbeat
        called = []

        def handle_heartbeat(process_name, group_name, pid, payload):
            """Fake handle_heartbeat."""
            called.append((process_name, group_name, pid, payload))

        self.listener.handle_heartbeat = handle_heartbeat
        payload_dict = {u"time": time.time(), "type": "heartbeat"}
        raw_data = ("processname:ticker groupname:ticker pid:42\n" +
                    json.dumps(payload_dict))
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        headers = childutils.get_headers(raw_header)
        self.listener._handle_event()
        # check
        self.assertEqual(1, len(called))
        del payload_dict['type']
        self.assertEqual(('ticker', 'ticker', '42', payload_dict), called[0])
        self.assertTrue(
            self.handler.check_debug("Event '%s' received: %r" %
                                     (headers['eventname'], raw_data)))
        # check the stdout info
        self.assertEqual(["READY", "RESULT 2", "OK"],
                         self.stdout.getvalue().split("\n"))

    def test_invalid_event_type(self):
        """Test with an invalid type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_error("Unable to handle event type '%s' - %r" %
                                     ('ping', raw_data)))

    def test_invalid_payload(self):
        """Test with an invalid payload."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict) + "<!foo>"
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_error("Unable to handle event type '%s' - %r" %
                                     ('None', raw_data)))

    def test_unhandled_event(self):
        """A unhandled event type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
            "poolserial:1 eventname:UNKNOWN len:%s\n" % len(raw_data)
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(
            self.handler.check_warning("Received unsupported event: %s - %r" %
                                       ('UNKNOWN', raw_data)))

    def test_check_interval(self):
        """Check that we properly check on the specified interval."""
        header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
                 "poolserial:1 eventname:TICK_5 len:0\n"
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        self.stdin.write(header)
        self.stdin.seek(0)
        self.listener._handle_event()
        self.assertEqual(self.listener.tick_count, 1)
        self.stdin.seek(0)
        with self.mocker:
            self.listener._handle_event()
class ReactorInspectorTestCase(TwistedTestCase):
    """Test the ReactorInspector class."""

    def setUp(self):
        """Set up."""
        class Helper(object):
            """Fake object with a controllable call."""
            def __init__(self):
                self.call_count = 1
                self.calls = []
                self.ri = None

            def call(self, func):
                """Call function when counter is 0, then stop running."""
                self.call_count -= 1
                self.calls.append(func)
                if self.call_count == 0:
                    for f in self.calls:
                        f()
                if self.call_count <= 0:
                    self.ri.stop()

        class FakeMetrics(object):
            """Fake Metrics object that records calls."""
            def __init__(self):
                """Initialize calls."""
                self.calls = []

            def meter(self, name, count):
                """Record call to meter()."""
                self.calls.append(("meter", name, count))

            def gauge(self, name, val):
                """Record call to gauge()."""
                self.calls.append(("gauge", name, val))

        logger = logging.getLogger("storage.server")
        logger.propagate = False
        logger.setLevel(TRACE)
        self.handler = MementoHandler()
        self.handler.setLevel(TRACE)
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)
        self.helper = Helper()
        self.fake_metrics = FakeMetrics()
        MetricsConnector.register_metrics("reactor_inspector",
                                          instance=self.fake_metrics)
        self.addCleanup(MetricsConnector.unregister_metrics)
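        # ReactorInspector reports its measured delay through the registered
        # "reactor_inspector" metrics, so tests can assert on
        # fake_metrics.calls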
        self.ri = ReactorInspector(logger, self.helper.call, loop_time=.1)
        self.helper.ri = self.ri

    def run_ri(self, call_count=None, join=True):
        """Set the call count and then run the ReactorInspector."""
        if call_count is not None:
            self.helper.call_count = call_count
        self.start_ts = time.time()
        self.ri.start()
        # Reactor will stop after call_count calls, thanks to helper
        if join:
            self.ri.join()

    def test_stop(self):
        """It stops."""
        self.run_ri(1000, join=False)
        assert self.ri.is_alive()
        self.ri.stop()
        self.ri.join()
        self.assertFalse(self.ri.is_alive())

    @defer.inlineCallbacks
    def test_dump_frames(self):
        """Test how frames are dumped.

        Rules:
        - own frame must not be logged
        - must log all other threads
        - main reactor thread must have special title
        """
        # other thread, whose frame must be logged
        waitingd = defer.Deferred()

        def waiting_function():
            """Function with funny name to be checked later."""
            reactor.callFromThread(waitingd.callback, True)
            # block here until the test sets the event below
            event.wait()

        event = threading.Event()
        threading.Thread(target=waiting_function).start()
        # Make sure the thread has entered the waiting_function
        yield waitingd

        # Set reactor_thread since we're not starting the ReactorInspector
        # thread here.
        self.ri.reactor_thread = threading.currentThread().ident

        # dump frames in other thread, also
        def dumping_function():
            """Function with funny name to be checked later."""
            time.sleep(.1)
            self.ri.dump_frames()
            reactor.callFromThread(d.callback, True)

        d = defer.Deferred()
        threading.Thread(target=dumping_function).start()
        yield d
        event.set()

        # check
        self.assertFalse(self.handler.check_debug("dumping_function"))
        self.assertTrue(self.handler.check_debug("Dumping Python frame",
                                                 "waiting_function"))
        self.assertTrue(self.handler.check_debug("Dumping Python frame",
                                                 "reactor main thread"))

    def test_reactor_ok(self):
        """Reactor working fast."""
        self.run_ri()
        ok_line = self.handler.check(TRACE, "ReactorInspector: ok")
        self.assertTrue(ok_line)
        self.assertTrue(ok_line.args[-1] >= 0)  # Should be near zero delay
        # Check the metrics
        expected_metric = ("gauge", "delay", ok_line.args[-1])
        self.assertEqual([expected_metric], self.fake_metrics.calls)
        self.assertTrue(self.ri.last_responsive_ts >= self.start_ts)

    @defer.inlineCallbacks
    def test_reactor_blocked(self):
        """Reactor not working fast."""
        dump_frames_called = defer.Deferred()
        self.ri.dump_frames = lambda: dump_frames_called.callback(True)
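        # dump_frames is replaced so the test only needs to verify that a
        # blocked reactor triggers it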
        self.run_ri(0)
        yield dump_frames_called
        log_line = self.handler.check(logging.CRITICAL, "ReactorInspector",
                                      "detected unresponsive")
        self.assertTrue(log_line)
        self.assertTrue(log_line.args[-1] >= .1)  # waited for entire loop time
        # Check the metrics
        expected_metric = ("gauge", "delay", log_line.args[-1])
        self.assertEqual([expected_metric], self.fake_metrics.calls)

        self.assertTrue(self.ri.last_responsive_ts < self.start_ts)

    def test_reactor_back_alive(self):
        """Reactor resurrects after some loops."""
        self.run_ri(3)
        late_line = self.handler.check_warning("ReactorInspector: late",
                                               "got: 0")
        self.assertTrue(late_line)
        self.assertTrue(late_line.args[-1] >= .2)  # At least 2 cycles of delay
        # Check the metrics
        expected_metric = ("gauge", "delay", late_line.args[-1])
        self.assertEqual(expected_metric, self.fake_metrics.calls[-1])

        self.assertTrue(self.ri.queue.empty())
        # A late reactor is not considered responsive (until a successful loop)
        self.assertTrue(self.ri.last_responsive_ts < self.start_ts)
class MainTests(BaseTwistedTestCase):
    """ Basic tests to check main.Main """

    @defer.inlineCallbacks
    def setUp(self):
        """ Sets up a test. """
        yield super(MainTests, self).setUp()
        self.root = self.mktemp('root')
        self.shares = self.mktemp('shares')
        self.data = self.mktemp('data')
        self.partials_dir = self.mktemp('partials_dir')

        self.patch(main_mod, 'SyncdaemonService', FakedExternalInterface)
        # no status listener by default
        self.patch(main_mod.status_listener, "get_listener", lambda *a: None)

        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        self._logger = logging.getLogger('ubuntuone.SyncDaemon')
        self._logger.addHandler(self.handler)
        self.addCleanup(self._logger.removeHandler, self.handler)

    def _get_main_common_params(self):
        """Return the parameters used by the all platforms."""
        return dict(root_dir=self.root,
                    shares_dir=self.shares,
                    data_dir=self.data,
                    partials_dir=self.partials_dir,
                    host='localhost', port=0,
                    dns_srv=False, ssl=False,
                    mark_interval=60,
                    handshake_timeout=2,
                    auth_credentials=FAKED_CREDENTIALS,
                    monitor_class=FakeMonitor)

    def build_main(self, **kwargs):
        """Build and return a Main object.

        Use reasonable defaults for the tests, plus whatever extra kwargs are
        passed in.

        """
        # get the params using the platform code to ensure they are correct
        params = self._get_main_common_params()
        params.update(kwargs)
        m = main_mod.Main(**params)
        self.addCleanup(m.shutdown)
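        # stub local_rescan so starting Main does not scan the disk: it just
        # reports the rescan as already done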
        m.local_rescan = lambda *_: m.event_q.push('SYS_LOCAL_RESCAN_DONE')
        return m

    def test_main_initialization(self):
        """test that creating a Main instance works as expected."""
        main = self.build_main()
        self.assertIsInstance(main, main_mod.Main)

    def test_main_start(self):
        """Test that Main.start works."""
        main = self.build_main()
        main.start()

    def test_main_restarts_on_critical_error(self):
        """Test that Main restarts when syncdaemon gets into UNKNOWN_ERROR."""
        self.restarted = False
        main = self.build_main()
        main.restart = lambda: setattr(self, 'restarted', True)
        main.start()
        main.event_q.push('SYS_UNKNOWN_ERROR')
        self.assertTrue(self.restarted)

    @defer.inlineCallbacks
    def test_shutdown_pushes_sys_quit(self):
        """When shutting down, the SYS_QUIT event is pushed."""
        params = self._get_main_common_params()
        main = main_mod.Main(**params)
        events = []
        self.patch(main.event_q, 'push',
                   lambda *a, **kw: events.append((a, kw)))

        yield main.shutdown()
        expected = [(('SYS_USER_DISCONNECT',), {}), (('SYS_QUIT',), {})]
        self.assertEqual(expected, events)

    def test_handshake_timeout(self):
        """Check connecting times out."""
        d0 = defer.Deferred()

        class Handler:
            """Trivial event handler."""
            def handle_SYS_HANDSHAKE_TIMEOUT(self):
                """Pass the test when we get this event."""
                reactor.callLater(0, d0.callback, None)

        main = self.build_main(handshake_timeout=0)

        def fake_connect(*a):
            """Only connect when States told so."""
            main.event_q.push('SYS_CONNECTION_MADE')
            return defer.Deferred()
        main.action_q.connect = fake_connect

        # fake the following to not be executed
        main.get_root = lambda *_: defer.Deferred()
        main.action_q.check_version = lambda *_: defer.Deferred()

        main.event_q.subscribe(Handler())
        main.start()
        main.event_q.push('SYS_NET_CONNECTED')
        main.event_q.push('SYS_USER_CONNECT', access_token='')
        return d0

    def test_create_dirs_already_exists_dirs(self):
        """test that creating a Main instance works as expected."""
        link = os.path.join(self.root, 'Shared With Me')
        self.assertFalse(is_link(link))
        self.assertTrue(path_exists(self.shares))
        self.assertTrue(path_exists(self.root))
        main = self.build_main()
        # check that the shares link is actually a link
        self.assertTrue(is_link(main.shares_dir_link))
        self.assertEqual(link, main.shares_dir_link)

    def test_create_dirs_already_exists_symlink_too(self):
        """test that creating a Main instance works as expected."""
        link = os.path.join(self.root, 'Shared With Me')
        make_link(self.shares, link)
        self.assertTrue(is_link(link))
        self.assertTrue(path_exists(self.shares))
        self.assertTrue(path_exists(self.root))
        main = self.build_main()
        # check that the shares link is actually a link
        self.assertTrue(is_link(main.shares_dir_link))

    def test_create_dirs_already_exists_but_not_symlink(self):
        """test that creating a Main instance works as expected."""
        link = os.path.join(self.root, 'Shared With Me')
        make_dir(link, recursive=True)
        self.assertTrue(path_exists(link))
        self.assertFalse(is_link(link))
        self.assertTrue(path_exists(self.shares))
        self.assertTrue(path_exists(self.root))
        main = self.build_main()
        # check that the shares link is actually a link
        self.assertEqual(main.shares_dir_link, link)
        self.assertFalse(is_link(main.shares_dir_link))

    def test_create_dirs_none_exists(self):
        """test that creating a Main instance works as expected."""
        # remove the existing dirs
        remove_dir(self.root)
        remove_dir(self.shares)
        main = self.build_main()
        # check that the shares link is actually a link
        self.assertTrue(is_link(main.shares_dir_link))
        self.assertTrue(path_exists(self.shares))
        self.assertTrue(path_exists(self.root))

    def test_connect_if_autoconnect_is_enabled(self):
        """If autoconnect option is enabled, connect the syncdaemon."""
        user_config = main_mod.config.get_user_config()
        orig = user_config.get_autoconnect()
        user_config.set_autoconnect(True)
        self.addCleanup(user_config.set_autoconnect, orig)

        main = self.build_main()
        expected = [('connect', (), {'autoconnecting': True})]
        self.assertEqual(main.external._called, expected)

    def test_dont_connect_if_autoconnect_is_disabled(self):
        """If autoconnect option is disabled, do not connect the syncdaemon."""
        user_config = main_mod.config.get_user_config()
        orig = user_config.get_autoconnect()
        user_config.set_autoconnect(False)
        self.addCleanup(user_config.set_autoconnect, orig)

        main = self.build_main()
        self.assertEqual(main.external._called, [])

    def _get_listeners(self, main):
        """Return the subscribed objects."""
        s = set()
        for listener in main.event_q.listener_map.values():
            for x in listener:
                s.add(x)
        return s

    def test_status_listener_is_installed(self):
        """The status listener is installed if needed."""
        self.patch(main_mod.status_listener,
                   "get_listener", lambda *a: FakeListener())
        main = self.build_main()
        self.assertIn(main.status_listener, self._get_listeners(main))

    def test_status_listener_not_installed_when_disabled(self):
        """The status listener is not started if it's not available."""
        main = self.build_main()
        self.assertNotIn(main.status_listener, self._get_listeners(main))

    def test_get_homedir(self):
        """The get_homedir returns the root dir."""
        self.patch(main_mod, "user_home", self.home_dir)
        expected = expand_user('~')
        main = self.build_main()
        self.assertEqual(main.get_homedir(), expected)

    def test_get_rootdir(self):
        """The get_rootdir returns the root dir."""
        expected = expand_user(os.path.join('~', 'Ubuntu Test One'))
        main = self.build_main(root_dir=expected)
        self.assertEqual(main.get_rootdir(), expected)

    def test_get_sharesdir(self):
        """The get_sharesdir returns the shares dir."""
        expected = expand_user(os.path.join('~', 'Share it to Me'))
        main = self.build_main(shares_dir=expected)
        self.assertEqual(main.get_sharesdir(), expected)

    def test_get_sharesdirlink(self):
        """The get_sharesdirlink returns the shares dir link."""
        expected = 'Share it to Me'
        main = self.build_main(shares_symlink_name=expected)
        self.assertEqual(main.get_sharesdir_link(),
                         os.path.join(main.get_rootdir(), expected))

    def test_version_is_logged(self):
        """Test that the client version is logged."""
        self.build_main()
        self.assertTrue(self.handler.check_info("client version", VERSION))

    def test_mark(self):
        """Check the MARK logs ok."""
        main = self.build_main()
        main.log_mark()
        shouldlog = ('MARK', "State: 'INIT'", 'queues IDLE', 'connection',
                     'queue: 0', 'offloaded: 0', 'hash: 0')
        self.assertTrue(self.handler.check(NOTE, *shouldlog))
class ClientDummyAuthTests(AuthenticationBaseTestCase):
    """Client authentication tests using the dummy auth provider."""

    auth_provider_class = DummyAuthProvider

    @defer.inlineCallbacks
    def setUp(self):
        yield super(ClientDummyAuthTests, self).setUp()
        self.creds = "open sesame"
        self.bad_creds = "not my secret"
        self.handler = MementoHandler()
        logger = logging.getLogger("storage.server")
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)
        self.handler.setLevel(logging.DEBUG)

    def assert_auth_ok_logging(self):
        """The successful authentication was logged."""
        self.assertTrue(self.handler.check_debug(
            "authenticated user", "OK", self.usr0.username))
        self.assertFalse(self.handler.check_warning("missing user"))

    def assert_auth_ok_missing_user(self):
        """The missing user was logged."""
        self.assertTrue(self.handler.check_debug(
            "missing user", "(id=%s)" % self.usr0.id))
        self.assertFalse(self.handler.check_info("authenticated user"))

    @defer.inlineCallbacks
    def test_auth_ok_user_ok(self):
        """Correct authentication must succeed."""
        yield self.callback_test(
            self.do_auth, credentials=self.creds, add_default_callbacks=True)
        self.assert_auth_ok_logging()

    @defer.inlineCallbacks
    def test_auth_ok_bad_user(self):
        """A non-existing user must fail authentication."""
        # make the user getter fail
        self.patch(self.service.factory.content, "get_user_by_id",
                   lambda *a, **k: defer.fail(DoesNotExist()))

        d = self.callback_test(
            self.do_auth, credentials=self.creds, add_default_callbacks=True)
        yield self.assertFailure(d, protocol_errors.AuthenticationFailedError)

        self.assert_auth_ok_missing_user()

    @defer.inlineCallbacks
    def test_auth_ok_with_session_id(self):
        """Correct authentication must succeed and include the session_id."""
        auth_request = yield self.callback_test(
            self.do_auth, credentials=self.creds, add_default_callbacks=True)

        protocol = self.service.factory.protocols[0]
        self.assertEqual(auth_request.session_id, str(protocol.session_id))

    @defer.inlineCallbacks
    def test_auth_ok_with_metadata(self):
        """Correct authentication must succeed and include metadata."""
        m_called = []
        self.service.factory.metrics.meter = lambda *a: m_called.append(a)

        metadata = {u"platform": u"linux2", u"version": u"1.0", u"foo": u"bar"}
        yield self.callback_test(
            self.do_auth, credentials=self.creds, metadata=metadata,
            add_default_callbacks=True)

        self.assertTrue(
            self.handler.check_info("Client metadata: %s" % metadata))
        self.assertIn(("client.platform.linux2", 1), m_called)
        self.assertIn(("client.version.1_0", 1), m_called)
        self.assertNotIn(("client.foo.bar", 1), m_called)

    def test_auth_fail(self):
        """Wrong secret must fail."""

        def test(client, **kwargs):
            d = self.do_auth(client, credentials=self.bad_creds)
            d.addCallbacks(
                lambda _: client.test_fail(Exception("Should not succeed.")),
                lambda _: client.test_done("ok"))

        return self.callback_test(test)

    def test_get_root(self):
        """Must receive the root after authentication."""

        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()
            self.assertIsNotNone(root_id)

        return self.callback_test(test, add_default_callbacks=True)

    def test_get_root_twice(self):
        """Get root must keep the root id."""

        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id1 = yield client.get_root()
            root_id2 = yield client.get_root()
            self.assertEqual(root_id1, root_id2)

        return self.callback_test(test, add_default_callbacks=True)

    def test_user_becomes_inactive(self):
        """After StorageUser authentication ok it becomes inactive."""

        @defer.inlineCallbacks
        def test(client):
            """Test."""
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()

            # create one file, should be ok
            yield client.make_file(request.ROOT, root_id, "f1")

            # cancel user subscription, so it needs
            # to get it again from the DB
            self.usr0.update(subscription=False)

            # create second file, should NOT be ok
            try:
                yield client.make_file(request.ROOT, root_id, "f2")
            except protocol_errors.DoesNotExistError:
                pass  # failed as we expected
            else:
                client.test_fail("It should have failed!")

        return self.callback_test(test, add_default_callbacks=True)
class NodeStructureTestCase(unittest.TestCase):
    """Tests that we store the node commands ok."""

    def setUp(self):
        """Set up the test."""
        self.qc = QueueContent(home='/')
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        logger = logging.getLogger('magicicada.queue_content')
        logger.addHandler(self.handler)
        logger.setLevel(logging.DEBUG)
        self.addCleanup(logger.removeHandler, self.handler)

    def test_one_node_file(self):
        """Add one node with a file op."""
        self.qc.set_content([('MakeFile', '123', {'path': '/a/b/foo'})])
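        # set_content receives (operation, op_id, params) tuples and files
        # each one under a tree of nodes, one node per path component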
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertEqual(node.last_modified, None)
        self.assertEqual(node.kind, KIND_DIR)
        self.assertEqual(node.operations, [])
        self.assertEqual(node.done, None)
        self.assertEqual(len(node.children), 1)

        node = node.children['b']
        self.assertEqual(node.last_modified, None)
        self.assertEqual(node.kind, KIND_DIR)
        self.assertEqual(node.operations, [])
        self.assertEqual(node.done, None)
        self.assertEqual(len(node.children), 1)

        node = node.children['foo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('123', 'MakeFile',
                     {'path': '/a/b/foo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_one_node_dir(self):
        """Add one node with a dir op."""
        self.qc.set_content([('MakeDir', '123', {'path': '/a/boo'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertEqual(node.last_modified, None)
        self.assertEqual(node.kind, KIND_DIR)
        self.assertEqual(node.operations, [])
        self.assertEqual(node.done, None)
        self.assertEqual(len(node.children), 1)

        node = node.children['boo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('123', 'MakeDir', {'path': '/a/boo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_one_node_unknown(self):
        """Add one node with a unknown op."""
        self.qc.set_content([('Unlink', '123', {'path': '/a/boo'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertEqual(node.last_modified, None)
        self.assertEqual(node.kind, KIND_DIR)
        self.assertEqual(node.operations, [])
        self.assertEqual(node.done, None)
        self.assertEqual(len(node.children), 1)

        node = node.children['boo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_UNKNOWN)
        expected = [('123', 'Unlink', {'path': '/a/boo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_one_node_known_unknown(self):
        """Add one node known op with a later unknown op."""
        self.qc.set_content([('MakeFile', '123', {'path': '/a'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('123', 'MakeFile', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        self.qc.set_content([('Unlink', '456', {'path': '/a'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('123', 'MakeFile', {'path': '/a', '__done__': False}),
                    ('456', 'Unlink', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_one_node_unknown_known(self):
        """Add one node unknown op with a later known op."""
        self.qc.set_content([('Unlink', '123', {'path': '/a'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_UNKNOWN)
        expected = [('123', 'Unlink', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        self.qc.set_content([('MakeDir', '456', {'path': '/a'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('123', 'Unlink', {'path': '/a', '__done__': False}),
                    ('456', 'MakeDir', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_several_nodes_mixed(self):
        """Add some nodes with different combinations."""
        # add /a/b
        self.qc.set_content([('MakeDir', '12', {'path': '/a/b'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        node = self.qc._node_ops[''].children['a']
        self.assertEqual(node.last_modified, None)
        self.assertEqual(node.kind, KIND_DIR)
        self.assertEqual(node.operations, [])
        self.assertEqual(node.done, None)
        self.assertEqual(len(node.children), 1)

        node = node.children['b']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/b', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # add /a/b/foo
        self.qc.set_content([('MakeDir', '34', {'path': '/a/b/foo'})])
        node = self.qc._node_ops[''].children['a']

        node = node.children['b']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/b', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 1)

        node = node.children['foo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('34', 'MakeDir', {'path': '/a/b/foo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # add /a/b/bar
        self.qc.set_content([('MakeDir', '45', {'path': '/a/b/bar'})])
        node = self.qc._node_ops[''].children['a']

        node = node.children['b']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/b', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 2)

        node = node.children['bar']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('45', 'MakeDir', {'path': '/a/b/bar', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # add /a/b/foo/fighters
        self.qc.set_content([('MakeFile', '67',
                              {'path': '/a/b/foo/fighters'})])
        node = self.qc._node_ops[''].children['a']

        node = node.children['b']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/b', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 2)

        node = node.children['foo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('34', 'MakeDir', {'path': '/a/b/foo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 1)

        node = node.children['fighters']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('67', 'MakeFile',
                     {'path': '/a/b/foo/fighters', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # add another op on /a/b/foo/fighters
        self.qc.set_content([('Unlink', '89', {'path': '/a/b/foo/fighters'})])
        node = self.qc._node_ops[''].children['a']

        node = node.children['b']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/b', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 2)

        node = node.children['foo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('34', 'MakeDir', {'path': '/a/b/foo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 1)

        node = node.children['fighters']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        p = '/a/b/foo/fighters'
        expected = [('67', 'MakeFile', {'path': p, '__done__': False}),
                    ('89', 'Unlink', {'path': p, '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

    def test_finishing_nothing(self):
        """Finish something that is not there."""
        r = self.qc.remove('MakeDir', '34', {'path': '/a/bar'})
        self.assertEqual(r, None)
        self.assertTrue(self.handler.check_warning(
                        "Element ''", "['', 'a', 'bar']", 'not in children'))

    def test_operation_error_nothing(self):
        """Finish an operation that is not there."""
        # create a node and break it on purpose
        self.qc.add('MakeDir', '12', {'path': '/a'})
        self.assertEqual(len(self.qc._node_ops), 1)
        node = self.qc._node_ops[''].children['a']
        node.operations = []

        # remove the operation and check
        r = self.qc.remove('MakeDir', '12', {'path': '/a'})
        self.assertEqual(r, None)
        self.assertTrue(self.handler.check_error(
                        "found 0 times", "MakeDir", "12"))

    def test_operation_error_several(self):
        """Finish an operation that is more than once."""
        # create a node and break it on purpose
        self.qc.add('MakeDir', '12', {'path': '/a'})
        self.assertEqual(len(self.qc._node_ops), 1)
        node = self.qc._node_ops[''].children['a']
        node.operations = node.operations * 2

        # remove the operation and check
        r = self.qc.remove('MakeDir', '12', {'path': '/a'})
        self.assertEqual(r, None)
        self.assertTrue(self.handler.check_error(
                        "found 2 times", "MakeDir", "12"))

    def test_two_ops_finishing_one(self):
        """Add some nodes with different combinations."""
        # create two dirs
        self.qc.set_content([('MakeDir', '12', {'path': '/a/foo'}),
                             ('MakeDir', '34', {'path': '/a/bar'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        # all inited properly
        root = self.qc._node_ops[''].children['a']
        self.assertEqual(root.last_modified, None)
        self.assertEqual(root.kind, KIND_DIR)
        self.assertEqual(root.operations, [])
        self.assertEqual(root.done, None)
        self.assertEqual(len(root.children), 2)

        node = root.children['foo']
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('12', 'MakeDir', {'path': '/a/foo', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        node = root.children['bar']
        bar_created_timestamp = node.last_modified
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('34', 'MakeDir', {'path': '/a/bar', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # finish the second make dir and check again
        self.qc.remove('MakeDir', '34', {'path': '/a/bar'})
        self.assertTrue(node.last_modified > bar_created_timestamp)
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_DIR)
        expected = [('34', 'MakeDir', {'path': '/a/bar', '__done__': True})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, True)
        self.assertEqual(len(node.children), 0)

    def test_two_ops_finishing_both(self):
        """Add two ops to the same node and finish both."""
        # create a file and upload it (two ops on the same node)
        self.qc.set_content([('MakeFile', '12', {'path': '/a'}),
                             ('Upload', '34', {'path': '/a'})])
        self.assertEqual(len(self.qc._node_ops), 1)

        # all inited properly
        node = self.qc._node_ops[''].children['a']
        node_created_tstamp = node.last_modified
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('12', 'MakeFile', {'path': '/a', '__done__': False}),
                    ('34', 'Upload', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # finish one
        self.qc.remove('MakeFile', '12', {'path': '/a'})
        node_changed_tstamp = node.last_modified
        self.assertTrue(node.last_modified > node_created_tstamp)
        expected = [('12', 'MakeFile', {'path': '/a', '__done__': True}),
                    ('34', 'Upload', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)

        # finish the second
        self.qc.remove('Upload', '34', {'path': '/a'})
        self.assertTrue(node.last_modified > node_changed_tstamp)
        expected = [('12', 'MakeFile', {'path': '/a', '__done__': True}),
                    ('34', 'Upload', {'path': '/a', '__done__': True})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, True)

    def test_one_op_finishes_startsagain(self):
        """Add an op, finish it, add another one."""
        self.qc.add('MakeFile', '12', {'path': '/a'})
        self.assertEqual(len(self.qc._node_ops), 1)

        # all inited properly
        node = self.qc._node_ops[''].children['a']
        node_created_tstamp = node.last_modified
        self.assertTrue(isinstance(node.last_modified, float))
        self.assertEqual(node.kind, KIND_FILE)
        expected = [('12', 'MakeFile', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
        self.assertEqual(len(node.children), 0)

        # finish the op
        self.qc.remove('MakeFile', '12', {'path': '/a'})
        node_changed_tstamp = node.last_modified
        self.assertTrue(node.last_modified > node_created_tstamp)
        expected = [('12', 'MakeFile', {'path': '/a', '__done__': True})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, True)

        # send other one to the same node
        self.qc.add('Upload', '34', {'path': '/a'})
        self.assertTrue(node.last_modified > node_changed_tstamp)
        expected = [('34', 'Upload', {'path': '/a', '__done__': False})]
        self.assertEqual(node.operations, expected)
        self.assertEqual(node.done, False)
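    # The tests above exercise QueueContent as a tree of per-path nodes:
    # set_content() and add() queue an operation on the node named by 'path'
    # (creating intermediate KIND_DIR parents that carry no operations of
    # their own), while remove() marks the matching operation as '__done__'
    # and flips node.done to True once every queued operation has finished.
    # A minimal usage sketch, based only on the API exercised here:
    #
    #   qc = QueueContent(home='/')
    #   qc.add('MakeFile', '12', {'path': '/a/b'})     # node /a/b, done=False
    #   qc.remove('MakeFile', '12', {'path': '/a/b'})  # same op, now done=True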
class StatsWorkerTestCase(TestCase):
    """Tests for StatsWorker class."""

    def setUp(self):
        super(StatsWorkerTestCase, self).setUp()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.worker = stats_worker.StatsWorker(10, '', self.rpc)

        # logging setup
        self.handler = MementoHandler()
        self.worker.logger.addHandler(self.handler)
        self.addCleanup(self.worker.logger.removeHandler, self.handler)
        self.worker.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.worker.logger.propagate = False
        self.handler.debug = True

    def test_collect_stats(self):
        """Test the collect_stats method."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        processes = [dict(name="bar", group="foo", pid="42", state=RUNNING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine', ('proc', 42, 'bar')])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(self.handler.check_info("Collecting stats for proc",
                                                "pid=42", "name=bar"))

    def test_collect_stats_not_running(self):
        """Test the collect_stats method if the proccess isn't running."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        processes = [dict(name="bar", group="foo", pid="42", state=STARTING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(self.handler.check_info("Ignoring process",
                                                "pid=42", "name=bar",
                                                "state=%s" % STARTING))

    def test_collect_stats_no_data(self):
        """Test the collect_stats method with no data of a process."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))

    def test_collect_process_info_new_report(self):
        """Check how the process info is collected first time."""
        mocker = Mocker()
        assert not self.worker.process_cache

        # patch Process to return our mock for test pid
        Process = mocker.mock()
        self.patch(stats_worker.psutil, 'Process', Process)
        proc = mocker.mock()
        pid = 1234
        expect(Process(pid)).result(proc)

        # patch ProcessReport to return our mock for the given proc
        ProcessReport = mocker.mock()
        self.patch(stats_worker, 'ProcessReport', ProcessReport)
        proc_report = mocker.mock()
        expect(ProcessReport(proc)).result(proc_report)

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)

    def test_collect_process_info_old_report(self):
        """Check how the process info is collected when cached."""
        mocker = Mocker()

        # put it in the cache
        pid = 1234
        proc_report = mocker.mock()
        self.worker.process_cache[pid] = proc_report

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)
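    # Taken together, the two tests above exercise a per-pid cache: the first
    # collection for a pid builds a psutil.Process, wraps it in a
    # ProcessReport and (presumably) stores it in self.worker.process_cache,
    # while later collections reuse the cached report directly.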

    def test_collect_system_info(self):
        """Check how the system info is collected."""
        mocker = Mocker()

        # change the constant to ensure it's used as we want
        result1 = dict(a=3, b=5)
        result2 = dict(c=7)
        fake = (lambda: result1, lambda: result2)
        self.patch(stats_worker, 'SYSTEM_STATS', fake)

        with mocker:
            result = self.worker._collect_machine()

        should = {}
        should.update(result1)
        should.update(result2)
        self.assertEqual(result, should)

    def test_informed_metrics(self):
        """Check how stats are reported."""
        # prepare a lot of fake info that will be "collected"
        machine_info = dict(foo=3, bar=5)
        process_info = {
            1: dict(some=1234, other=4567),
            2: dict(some=9876, other=6543),
        }
        self.worker._collect_process = lambda pid, name: process_info[pid]
        self.worker._collect_machine = lambda: machine_info
        processes = [
            dict(name="proc1", group="", pid="1", state=RUNNING),
            dict(name="proc2", group="", pid="2", state=RUNNING),
        ]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)

        # patch the metric reporter to see what is sent
        reported = set()
        self.worker.metrics.gauge = lambda *a: reported.add(a)

        # what we should get is...
        should = set([
            ('foo', 3),
            ('bar', 5),
            ('some', 1234),
            ('other', 4567),
            ('some', 9876),
            ('other', 6543),
        ])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(reported, should)
    def test_noerror(self):
        """No error, no action."""
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        deferror_handler(dict(isError=False, message=''))
        self.assertFalse(handler.check_error("error"))
class ClientDummyAuthTests(AuthenticationBaseTestCase):
    """Client authentication tests using the dummy auth provider."""

    auth_provider_class = DummyAuthProvider

    @defer.inlineCallbacks
    def setUp(self):
        yield super(ClientDummyAuthTests, self).setUp()
        self.creds = 'open sesame'
        self.bad_creds = 'not my secret'
        self.handler = MementoHandler()
        logger = logging.getLogger('storage.server')
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)
        self.handler.setLevel(logging.DEBUG)

    def assert_auth_ok_logging(self):
        self.assertTrue(
            self.handler.check_debug("authenticated user", "OK",
                                     self.usr0.username))
        self.assertFalse(self.handler.check_warning("missing user"))

    def assert_auth_ok_missing_user(self):
        self.assertTrue(
            self.handler.check_debug("missing user", "(id=%s)" % self.usr0.id))
        self.assertFalse(self.handler.check_info("authenticated user"))

    @defer.inlineCallbacks
    def test_auth_ok_user_ok(self):
        """Correct authentication must succeed."""
        yield self.callback_test(self.do_auth,
                                 credentials=self.creds,
                                 add_default_callbacks=True)
        self.assert_auth_ok_logging()

    @defer.inlineCallbacks
    def test_auth_ok_bad_user(self):
        """Non existing user must fail authentication."""
        # make the user getter fail
        self.patch(self.service.factory.content, 'get_user_by_id',
                   lambda *a, **k: defer.fail(DoesNotExist()))

        d = self.callback_test(self.do_auth,
                               credentials=self.creds,
                               add_default_callbacks=True)
        yield self.assertFailure(d, protocol_errors.AuthenticationFailedError)

        self.assert_auth_ok_missing_user()

    @defer.inlineCallbacks
    def test_auth_ok_with_session_id(self):
        """Correct authentication must succeed and include the session_id."""
        auth_request = yield self.callback_test(self.do_auth,
                                                credentials=self.creds,
                                                add_default_callbacks=True)

        protocol = self.service.factory.protocols[0]
        self.assertEqual(auth_request.session_id, str(protocol.session_id))

    @defer.inlineCallbacks
    def test_auth_ok_with_metadata(self):
        """Correct authentication must succeed and include metadata."""
        m_called = []
        self.service.factory.metrics.meter = lambda *a: m_called.append(a)

        metadata = {u"platform": u"linux2", u"version": u"1.0", u"foo": u"bar"}
        yield self.callback_test(self.do_auth,
                                 credentials=self.creds,
                                 metadata=metadata,
                                 add_default_callbacks=True)

        self.assertTrue(
            self.handler.check_info("Client metadata: %s" % metadata))
        self.assertIn(("client.platform.linux2", 1), m_called)
        self.assertIn(("client.version.1_0", 1), m_called)
        self.assertNotIn(("client.foo.bar", 1), m_called)

    def test_auth_fail(self):
        """Wrong secret must fail."""
        def test(client, **kwargs):
            d = self.do_auth(client, credentials=self.bad_creds)
            d.addCallbacks(
                lambda _: client.test_fail(Exception("Should not succeed.")),
                lambda _: client.test_done("ok"))

        return self.callback_test(test)

    def test_get_root(self):
        """Must receive the root after authentication."""
        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()
            self.assertIsNotNone(root_id)

        return self.callback_test(test, add_default_callbacks=True)

    def test_get_root_twice(self):
        """Get root must keep the root id."""
        @defer.inlineCallbacks
        def test(client, **kwargs):
            yield self.do_auth(client, credentials=self.creds)
            root_id1 = yield client.get_root()
            root_id2 = yield client.get_root()
            self.assertEqual(root_id1, root_id2)

        return self.callback_test(test, add_default_callbacks=True)

    def test_user_becomes_inactive(self):
        """After StorageUser authentication ok it becomes inactive."""
        @defer.inlineCallbacks
        def test(client):
            """Test."""
            yield self.do_auth(client, credentials=self.creds)
            root_id = yield client.get_root()

            # create one file, should be ok
            yield client.make_file(request.ROOT, root_id, "f1")

            # cancel user subscription, so it needs
            # to get it again from the DB
            self.usr0.update(subscription=False)

            # create second file, should NOT be ok
            try:
                yield client.make_file(request.ROOT, root_id, "f2")
            except protocol_errors.DoesNotExistError:
                pass  # failed as we expected
            else:
                client.test_fail("It should have failed!")

        return self.callback_test(test, add_default_callbacks=True)
class BaseTestCase(TestCase):
    """UI test cases for Magicicada UI."""

    kwargs = {}
    store = None
    ui_class = None

    @defer.inlineCallbacks
    def setUp(self):
        yield super(BaseTestCase, self).setUp()
        self.sd = FakedSyncdaemon()
        self.patch(syncdaemon, 'SyncDaemon', lambda: self.sd)

        self.ui = None
        if self.ui_class is not None:
            # self.ui_class is not callable, pylint: disable=E1102
            self.ui = self.ui_class(**self.kwargs)
            self.addCleanup(self.ui.destroy)

        self._called = False
        self._set_called = (lambda *args, **kwargs:
                            setattr(self, '_called', (args, kwargs)))

        if getattr(self.ui, 'logger', None) is not None:
            self.memento = MementoHandler()
            self.memento.setLevel(logging.DEBUG)
            self.ui.logger.addHandler(self.memento)
            self.ui.logger.setLevel(logging.DEBUG)
            self.addCleanup(self.ui.logger.removeHandler, self.memento)

        if getattr(self.ui, 'on_destroy', None) is not None:
            self.addCleanup(self.ui.on_destroy)

    def assert_store_correct(self, items, store=None):
        """Test that 'store' has 'items' as content."""
        if store is None:
            store = self.store
            assert store is not None, 'class must provide a store'

        msg = 'amount of rows for %s must be %s (got %s).'
        self.assertEqual(len(store), len(items),
                         msg % (store, len(items), len(store)))

        def unicodeize(elem):
            """Return the unicode repr of 'elem'."""
            if isinstance(elem, str):
                result = elem.decode('utf-8')
            else:
                result = elem
            return result

        def scan_tree(tree_iter, items):
            """Scan a whole tree."""
            msg = "row must be %r (got %r instead)"
            while tree_iter is not None:
                expected, children = items.pop()
                actual = store.get(tree_iter, *range(len(expected)))
                actual = map(unicodeize, actual)
                self.assertEqual(expected, actual,
                                 msg % (expected, actual))
                self.assertEqual(len(children),
                                 store.iter_n_children(tree_iter))

                if children:
                    child_iter = store.iter_children(tree_iter)
                    scan_tree(child_iter, children)

                tree_iter = store.iter_next(tree_iter)

        # assert rows content equal to items content
        root_iter = store.get_iter_first()
        tmp = list(reversed(items))
        scan_tree(root_iter, tmp)
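    # A hypothetical call to the helper above, for a store holding one parent
    # row with a single child row (column values are illustrative only):
    #
    #   self.assert_store_correct([
    #       ([u'parent col 0', u'parent col 1'], [
    #           ([u'child col 0', u'child col 1'], []),
    #       ]),
    #   ])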

    def debug_store(self):
        """Print the whole content of a store."""
        store_iter = self.store.get_iter_first()
        columns = self.store.get_n_columns()
        print '\nShowing contents of store:', self.store
        while store_iter is not None:
            print self.store.get(store_iter, *range(columns))
            store_iter = self.store.iter_next(store_iter)

    def assert_dialog_properties(self, dialog, title=None, modal=True,
                                 position=Gtk.WindowPosition.CENTER_ON_PARENT):
        """The dialog has correct properties."""
        msg = 'Must %sbe modal.'
        self.assertEqual(modal, dialog.get_modal(),
                         msg % ('' if modal else 'not '))

        actual_position = dialog.get_property('window-position')
        msg = 'dialog must have %s position (got %s instead).'
        self.assertEqual(position, actual_position,
                         msg % (position, actual_position))

        actual = dialog.get_title()
        msg = 'Title must be %r (got %r instead)'
        self.assertEqual(title, actual, msg % (title, actual))

        msg = 'Must not skip taskbar.'
        self.assertFalse(dialog.get_skip_taskbar_hint(), msg)

    def assert_function_logs(self, level, func, *args, **kwargs):
        """Check 'funcion' logs its inputs as 'level'."""
        name = func.__name__
        msg = '%s must be logged with level %r'
        try:
            func(*args, **kwargs)
        except Exception:  # pylint: disable=W0703
            self.assertTrue(self.memento.check_error(name),
                            'function (%s) must be logged as ERROR' % name)

        memento_func = getattr(self.memento, 'check_%s' % level.lower())
        self.assertTrue(memento_func(name), msg % (name, level))
        for arg in args:
            self.assertTrue(memento_func(str(arg)), msg % (arg, level))
        for key, val in kwargs.iteritems():
            arg = "'%s': %r" % (key, val)
            self.assertTrue(memento_func(arg), msg % (arg, level))

    def assert_method_called(self, obj, method, *args, **kwargs):
        """Check that obj.method(*args, **kwargs) was called."""
        self.assertEqual(getattr(obj, '_called')[method], [(args, kwargs)],
                         'Method %r was not called with the args %r and '
                         'kwargs %r' % (method, args, kwargs))

    def assert_methods_called(self, obj, methods):
        """Check that every method in 'methods' was called on 'obj'."""
        expected = dict((k, [((), {})]) for k in methods)
        self.assertEqual(getattr(obj, '_called'), expected)

    def assert_no_method_called(self, obj):
        """Check that obj.method was NOT called."""
        self.assertEqual(getattr(obj, '_called'), {})
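    # The three helpers above assume the faked object records its calls in a
    # '_called' dict mapping method name to a list of (args, kwargs) tuples,
    # e.g. obj._called == {'start': [(('arg',), {})]} after obj.start('arg').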
class HeartbeatListenerTestCase(TestCase):
    """Tests for HeartbeatListener class."""

    def setUp(self):
        super(HeartbeatListenerTestCase, self).setUp()
        self.stdin = StringIO()
        self.stdout = StringIO()
        self.stderr = StringIO()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.listener = HeartbeatListener(1, 10, ['foo'], [], self.rpc,
                                          stdin=self.stdin, stdout=self.stdout,
                                          stderr=self.stderr)
        self.next_fail = {}
        self.handler = MementoHandler()
        self.listener.logger.addHandler(self.handler)
        self.listener.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.listener.logger.propagate = False
        self.processes = [dict(name="heartbeat", group="heartbeat", pid="101",
                               state=RUNNING)]
        self.handler.debug = True

    def tearDown(self):
        self.listener.logger.removeHandler(self.handler)
        self.handler.close()
        self.next_fail = None
        self.handler = None
        self.listener = None
        super(HeartbeatListenerTestCase, self).tearDown()

    def fail_next_stop(self, pname):
        """Make next stopProcess to fail."""
        expect(self.rpc.supervisor.stopProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to stop the process."))

    def fail_next_start(self, pname):
        """Make next startProcess to fail."""
        expect(self.rpc.supervisor.startProcess(pname)).throw(
            xmlrpclib.Fault(42, "Failed to start the process."))

    def test_restart(self):
        """Test the restart method."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        expect(self.rpc.supervisor.startProcess("foo"))
        with self.mocker:
            self.listener.restart("foo", "testing")
        self.assertTrue(self.handler.check_info("Restarting foo (last "
                                                "hearbeat: testing)"))

    def test_restart_fail_stop(self):
        """Test the restart method failing to stop the process."""
        self.fail_next_stop("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = ("Failed to stop process %s (last heartbeat: %s), "
                       "exiting: %s") % \
                    ("foo", last, "<Fault 42: 'Failed to stop the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_restart_fail_start(self):
        """Test the restart method failing to start the process."""
        expect(self.rpc.supervisor.stopProcess("foo"))
        self.fail_next_start("foo")
        last = time.time()
        with self.mocker:
            try:
                self.listener.restart("foo", last)
            except xmlrpclib.Fault:
                msg = (
                    'Failed to start process %s after stopping it, exiting: %s'
                ) % ("foo", "<Fault 42: 'Failed to start the process.'>")
                self.assertTrue(self.handler.check_error(msg))
            else:
                self.fail("Should get an xmlrpclib.Fault")

    def test_check_processes(self):
        """Test the check_processes method."""
        # add the fake process to the process list
        self.processes.append(dict(name="foo", group="foo", pid="42",
                                   state=RUNNING))
        self.processes.append(dict(name="bar", group="bar", pid="43",
                                   state=RUNNING))
        self.listener.processes = ['bar']
        # 2 processes to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)}
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 3)}
        self.listener.data['p-1'] = {
            'time': time.time() - (self.listener.timeout - 1)}
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            self.listener.check_processes()

    def test_check_processes_no_data(self):
        """Test the check_processes method with no data of a process."""
        # add the fake process to the process list
        self.processes.append(dict(name="foo", group="foo", pid="42",
                                   state=RUNNING))
        self.processes.append(dict(name="bar", group="bar", pid="43",
                                   state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        expect(self.rpc.supervisor.stopProcess("foo:"))
        expect(self.rpc.supervisor.startProcess("foo:"))
        expect(self.rpc.supervisor.stopProcess("bar:bar"))
        expect(self.rpc.supervisor.startProcess("bar:bar"))
        with self.mocker:
            # two processes to restart (no heartbeat data for them)
            self.listener.check_processes()
        self.assertTrue(self.handler.check_warning(
            "Restarting process foo:foo (42), as we never received a hearbeat"
            " event from it"))
        self.assertTrue(self.handler.check_warning(
            "Restarting process bar:bar (43), as we never received a hearbeat"
            " event from it"))

    def test_check_processes_untracked(self):
        """Test the check_processes method with a untracked proccess."""
        # add the fake process to the process list
        self.processes.append(dict(name="foo-untracked", group="untracked",
                                   pid="43", state=RUNNING))
        # add a new tracked process from an untracked group
        self.processes.append(dict(name="bar-untracked", group="bar", pid="44",
                                   state=RUNNING))
        self.listener.processes = ['bar']
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(self.handler.check_info(
            "Ignoring untracked:foo-untracked (43) as isn't tracked."))
        self.assertTrue(self.handler.check_info(
            "Ignoring bar:bar-untracked (44) as isn't tracked."))

    def test_check_processes_not_running(self):
        """Test the check_processes method if the proccess isn't running."""
        # add the fake process to the process list
        self.processes.append(dict(name="foo", group="foo", pid="42",
                                   state=states.ProcessStates.STARTING))
        # add a new tracked process from an untracked group
        self.processes.append(dict(name="bar", group="bar", pid="43",
                                   state=states.ProcessStates.STARTING))
        self.listener.processes = ['bar']
        # 2 processes to restart
        self.listener.data['foo'] = {
            'time': time.time() - (self.listener.timeout + 2)}
        self.listener.data['bar'] = {
            'time': time.time() - (self.listener.timeout + 2)}
        expect(self.rpc.supervisor.getAllProcessInfo()).result(self.processes)
        with self.mocker:
            self.listener.check_processes()
        self.assertTrue(self.handler.check_info(
            "Ignoring foo:foo (42) as isn't running."))
        self.assertTrue(self.handler.check_info(
            "Ignoring bar:bar (43) as isn't running."))

    def test_handle_heartbeat(self):
        """Test handle_heartbeat method."""
        payload = {"time": time.time()}
        self.listener.handle_heartbeat('process_name', 'group_name',
                                       '42', payload)
        info = {"pid": "42", "time": payload["time"],
                "received": self.listener.data["process_name"]["received"]}
        self.assertEqual({"process_name": info}, self.listener.data)

    def test_handle_event(self):
        """Test handle_event method."""
        # patch handle_heartbeat
        called = []

        def handle_heartbeat(process_name, group_name, pid, payload):
            """Fake handle_heartbeat."""
            called.append((process_name, group_name, pid, payload))

        self.listener.handle_heartbeat = handle_heartbeat
        payload_dict = {u"time": time.time(), "type": "heartbeat"}
        raw_data = ("processname:ticker groupname:ticker pid:42\n" +
                    json.dumps(payload_dict))
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        headers = childutils.get_headers(raw_header)
        self.listener._handle_event()
        # check
        self.assertEqual(1, len(called))
        del payload_dict['type']
        self.assertEqual(('ticker', 'ticker', '42', payload_dict), called[0])
        self.assertTrue(self.handler.check_debug(
            "Event '%s' received: %r" % (headers['eventname'], raw_data)))
        # check the stdout info
        self.assertEqual(["READY", "RESULT 2", "OK"],
                         self.stdout.getvalue().split("\n"))
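        # The assertions above also pin down the supervisor eventlistener
        # protocol as exercised here: the listener reads a one-line header
        # (carrying a 'len:' field) plus 'len' bytes of payload from stdin,
        # and answers on stdout with "READY\n" and then "RESULT <n>\n"
        # followed by the n-byte result body ("OK").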

    def test_invalid_event_type(self):
        """Test with an invalid type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(self.handler.check_error(
            "Unable to handle event type '%s' - %r" % ('ping', raw_data)))

    def test_invalid_payload(self):
        """Test with an invalid payload."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict) + "<!foo>"
        raw_header = ("ver:3.0 server:supervisor serial:1 pool:listener "
                      "poolserial:10 eventname:PROCESS_COMMUNICATION_STDOUT"
                      " len:%s\n" % len(raw_data))
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(self.handler.check_error(
            "Unable to handle event type '%s' - %r" % ('None', raw_data)))

    def test_unhandled_event(self):
        """A unhandled event type."""
        payload_dict = {u"time": time.time(), "type": "ping"}
        raw_data = 'processname:ticker groupname:ticker pid:42\n' + \
            json.dumps(payload_dict)
        raw_header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
            "poolserial:1 eventname:UNKNOWN len:%s\n" % len(raw_data)
        self.stdin.write(raw_header + raw_data)
        self.stdin.seek(0)
        self.listener._handle_event()
        # check
        self.assertTrue(self.handler.check_warning(
            "Received unsupported event: %s - %r" % ('UNKNOWN', raw_data)))

    def test_check_interval(self):
        """Check that we properly check on the specified interval."""
        header = "ver:3.0 server:supervisor serial:1 pool:heartbeat " + \
                 "poolserial:1 eventname:TICK_5 len:0\n"
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        self.stdin.write(header)
        self.stdin.seek(0)
        self.listener._handle_event()
        self.assertEqual(self.listener.tick_count, 1)
        self.stdin.seek(0)
        with self.mocker:
            self.listener._handle_event()
class OffloadQueueTestCase(TwistedTestCase):
    """Tests the OffloadQueue class."""

    def setUp(self):
        """Set up."""
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        logger = logging.getLogger("ubuntuone.SyncDaemon.OffloadQueue")
        logger.setLevel(logging.DEBUG)
        logger.addHandler(self.handler)
        self.addCleanup(logger.removeHandler, self.handler)
        self.oq = OffloadQueue()
        self.addCleanup(self.oq._tempfile.close)
        return super(OffloadQueueTestCase, self).setUp()

    def test_serialization_tuple(self):
        """Check that it can store tuples of strings."""
        data = ("foo", "bar")
        self.oq.push(data)
        retrieved = self.oq.pop()
        self.assertEqual(data, retrieved)

    def test_serialization_markers(self):
        """Check that it can store markers."""
        marker = MDMarker("foo")
        self.oq.push(marker)
        retrieved = self.oq.pop()
        self.assertTrue(IMarker.providedBy(retrieved))

    def test_fifo_simple(self):
        """Check FIFO queue with one silly value."""
        data = "data"
        self.oq.push(data)
        self.assertEqual(len(self.oq), 1)
        retrieved = self.oq.pop()
        self.assertEqual(data, retrieved)
        self.assertEqual(len(self.oq), 0)

    def test_fifo_double(self):
        """Check FIFO queue with two values."""
        data1, data2 = "data1", "data2"
        self.oq.push(data1)
        self.oq.push(data2)
        self.assertEqual(len(self.oq), 2)
        retrieved = self.oq.pop()
        self.assertEqual(data1, retrieved)
        self.assertEqual(len(self.oq), 1)
        retrieved = self.oq.pop()
        self.assertEqual(data2, retrieved)
        self.assertEqual(len(self.oq), 0)

    def test_fifo_mixed(self):
        """Check FIFO queue with more values."""
        data1, data2, data3 = "data1", "data2", "data3"
        self.oq.push(data1)
        self.oq.push(data2)
        self.assertEqual(data1, self.oq.pop())
        self.oq.push(data3)
        self.assertEqual(data2, self.oq.pop())
        self.assertEqual(data3, self.oq.pop())

    def test_rotate_limit_not_reached(self):
        """File does not rotate if limits are not reached."""
        orig_temp = self.oq._tempfile
        self.oq.push("data")
        self.assertEqual(self.oq._tempfile, orig_temp)
        self.oq.pop()
        self.assertEqual(self.oq._tempfile, orig_temp)

    def _get_data(self, data="data"):
        """Return data to store and it's item size in disk."""
        pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
        item_size = len(pickled) + STRUCT_SIZE
        return data, item_size
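    # item_size above is what one queued item presumably occupies on disk:
    # the pickled payload plus STRUCT_SIZE bytes of fixed-size length header
    # written in front of it.  The rotation tests below size their limits as
    # multiples of this value.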

    def test_rotate_soft_limit_on_push(self):
        """Rotation happens with soft limit on push."""
        # set and check rotation limits
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5
        assert self.oq._rotation_too_big_size > item_size * 10
        assert self.oq._rotation_hard_limit > item_size * 10

        # put two items, removing one so we can save space on rotation
        orig_temp = self.oq._tempfile
        self.oq.push(data)
        self.oq.pop()
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

        # push another, now we're over again, but now we can save space
        self.oq.push(data)
        self.assertNotEqual(self.oq._tempfile, orig_temp)
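        # rough arithmetic for this test (soft limit = 2.5 * item_size):
        # after push/pop/push the file holds about 2 items worth of bytes,
        # still under the limit; the third push takes it to about 3 items,
        # over the limit, and the already-popped item means rotation would
        # actually reclaim space, so a new temp file is started.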

    def test_rotate_soft_limit_on_pop(self):
        """Rotation happens with soft limit on pop."""
        # set and check rotation limits
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5
        assert self.oq._rotation_too_big_size > item_size * 10
        assert self.oq._rotation_hard_limit > item_size * 10

        # put four items
        orig_temp = self.oq._tempfile
        self.oq.push(data)
        self.oq.push(data)
        self.oq.push(data)
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

        # pop the first one, we make room and still have more than soft limit
        # and min size, so we rotate
        self.oq.pop()
        self.assertNotEqual(self.oq._tempfile, orig_temp)

    def test_rotate_too_much_data(self):
        """Soft rotation doesn't happen if we have more than the max size."""
        # set and check rotation limits
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5
        self.oq._rotation_too_big_size = item_size
        assert self.oq._rotation_hard_limit > item_size * 10

        # put two items, removing one so we can save space on rotation
        orig_temp = self.oq._tempfile
        self.oq.push(data)
        self.oq.pop()
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

        # push another, now we're over but we have too much data to move,
        # so rotation should not happen
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

    def test_rotate_hard_limit(self):
        """We rotate on hard limit, no matter what."""
        # set and check rotation limits
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5
        self.oq._rotation_too_big_size = item_size
        self.oq._rotation_hard_limit = item_size * 3.5

        # put two items, removing one so we can save space on rotation
        orig_temp = self.oq._tempfile
        self.oq.push(data)
        self.oq.pop()
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

        # push another, now we're over but we have too much data to move,
        # so rotation should not happen
        self.oq.push(data)
        self.assertEqual(self.oq._tempfile, orig_temp)

        # push another one, and check that after going over the hard limit
        # it will rotate no matter what
        self.oq.push(data)
        self.assertNotEqual(self.oq._tempfile, orig_temp)

    def test_rotate_keep_working(self):
        """Just check that all is normal after rotation."""
        data = []
        size = 0
        for i in xrange(10):
            d, s = self._get_data("data" + str(i))
            data.append(d)
            size += s
        self.oq._rotation_soft_limit = size * 0.7
        orig_temp = self.oq._tempfile

        # put one item and remove it, just to make the file rotatable
        results = []
        self.oq.push(data[0])
        results.append(self.oq.pop())

        # push the rest of the data, it should rotate at some point
        for d in data[1:]:
            self.oq.push(d)
        assert self.oq._tempfile != orig_temp

        # pop everything and compare
        while len(self.oq):
            results.append(self.oq.pop())
        self.assertEqual(data, results)

    def test_rotate_removes_old_file(self):
        """Rotation should start a new file and remove the previous one."""
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5
        orig_fname = self.oq._tempfile_name

        self.oq.push(data)
        self.oq.pop()
        self.oq.push(data)
        self.oq.push(data)

        self.assertFalse(os.path.exists(orig_fname))

    def test_log_init_tempfile(self):
        """Log the initial temp file used."""
        self.assertTrue(self.handler.check_debug("Using temporary file", repr(self.oq._tempfile_name)))

    def test_log_rotate(self):
        """Log new file in rotation."""
        data, item_size = self._get_data()
        self.oq._rotation_soft_limit = item_size * 2.5

        self.oq.push(data)
        self.oq.pop()
        self.oq.push(data)
        self.oq.push(data)

        self.assertTrue(self.handler.check_debug("Rotation into", "moving", repr(self.oq._tempfile_name)))

    def test_safe_rotate_crash(self):
        """All is ok even after rotation crashes when getting temp file."""

        def crash(*a):
            """Will crash."""
            raise NameError("ugly")

        self.patch(tempfile, "mkstemp", crash)

        # do a lot of things, rotating in the middle, checking all is ok
        self.test_rotate_keep_working()
        self.assertTrue(self.handler.check_exception(NameError))
        self.assertTrue(self.oq._in_memory)

    def test_safe_rotate_unlink(self):
        """All is ok after failing to unlink old file."""

        def crash(*a):
            """Will crash."""
            raise NameError("ugly")

        self.patch(os, "unlink", crash)

        # do a lot of things, rotating in the middle, checking all is ok
        self.test_rotate_keep_working()
        self.assertTrue(self.handler.check_warning("Error when removing old tempfile", "NameError"))

    def _test_safe_push_write(self, count):
        """Fail when pushing an item will leave it all ok."""

        class CrashingFile(StringIO.StringIO):
            """File-like object that crashes in second write."""

            def __init__(self):
                self._fail_counter = 0
                StringIO.StringIO.__init__(self)

            def write(self, *a):
                """Crashing write."""
                self._fail_counter += 1
                if self._fail_counter == count:
                    raise ValueError("broken")
                else:
                    StringIO.StringIO.write(self, *a)

        self.oq._tempfile = CrashingFile()

        # will try three items, checking all is ok
        self.test_fifo_mixed()
        self.assertTrue(self.handler.check_exception(ValueError))
        self.assertTrue(self.oq._in_memory)

    def test_safe_push_write_first(self):
        """Fail when pushing an item, on first write."""
        self._test_safe_push_write(1)

    def test_safe_push_write_second(self):
        """Fail when pushing an item, on second write."""
        self._test_safe_push_write(2)
class TestWatch(common_tests.TestWatch):
    """Test the watch so that it returns the same events as pyinotify."""

    timeout = 5

    @defer.inlineCallbacks
    def setUp(self):
        yield super(TestWatch, self).setUp()
        self.path = '/Users/username/folder'
        self.common_path = '/Users/username/folder'
        self.invalid_path = '/Users/username/path/to/not/dir'
        self.basedir = self.mktemp('test_root')
        self.mask = None
        self.stream = None
        self.memento = MementoHandler()
        self.memento.setLevel(logging.DEBUG)
        self.raw_events = []
        self.paths_checked = []
        old_is_dir = Watch._path_is_dir
        self.fake_events_processor = FakeEventsProcessor()

        def path_is_dir_wrapper(watch, path):
            """Wrapper that gets the checked paths."""
            result = old_is_dir(watch, path)
            self.paths_checked.append((path, result))
            return result

        self.patch(Watch, '_path_is_dir', path_is_dir_wrapper)

    def test_not_ignore_path(self):
        """Test that we do get the events when they do not match."""
        self.patch(
            filesystem_notifications.reactor, 'callFromThread',
            lambda x, e: x(e))
        super(TestWatch, self).test_not_ignore_path()

    def test_undo_ignore_path_ignored(self):
        """Test that we do deal with events from and old ignored path."""
        self.patch(
            filesystem_notifications.reactor, 'callFromThread',
            lambda x, e: x(e))
        super(TestWatch, self).test_not_ignore_path()

    def test_undo_ignore_path_other_ignored(self):
        """Test that we can undo and the other path is ignored."""
        self.patch(
            filesystem_notifications.reactor, 'callFromThread',
            lambda x, e: x(e))
        super(TestWatch, self).test_undo_ignore_path_other_ignored()

    def test_mixed_ignore_path(self):
        """Test that we do get the correct events."""
        self.patch(
            filesystem_notifications.reactor, 'callFromThread',
            lambda x, e: x(e))
        super(TestWatch, self).test_mixed_ignore_path()

    @defer.inlineCallbacks
    def test_file_create(self):
        """Test that the correct event is returned on a file create."""
        file_name = os.path.join(self.basedir, 'test_file_create')

        def create_file():
            """Action used for the test."""
            # simply create a new file
            fd = open(file_name, 'w')
            fd.flush()
            os.fsync(fd)
            fd.close()

        events = yield self._perform_operations(
            self.basedir, self.mask, create_file, 1)
        event = events[0]
        self.assertFalse(event.dir)
        self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask)
        self.assertEqual('IN_CREATE', event.maskname)
        self.assertEqual(os.path.split(file_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, file_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_dir_create(self):
        """Test that the correct event is returned on a dir creation."""
        dir_name = os.path.join(self.basedir, 'test_dir_create')

        def create_dir():
            """Action for the test."""
            os.mkdir(dir_name)

        events = yield self._perform_operations(
            self.basedir, self.mask, create_dir, 1)
        event = events[0]
        self.assertTrue(event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_CREATE'] |
            common_tests.IS_DIR, event.mask)
        self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname)
        self.assertEqual(os.path.split(dir_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_file_remove(self):
        """Test that the correct event is raised when a file is removed."""
        file_name = os.path.join(self.basedir, 'test_file_remove')
        # create the file before recording
        open(file_name, 'w').close()

        def remove_file():
            """Action for the test."""
            os.remove(file_name)

        events = yield self._perform_operations(self.basedir, self.mask,
                                                remove_file, 1)
        event = events[0]
        self.assertFalse(event.dir)
        self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'], event.mask)
        self.assertEqual('IN_DELETE', event.maskname)
        self.assertEqual(os.path.split(file_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, file_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_dir_remove(self):
        """Test that the correct event is raised when a dir is removed."""
        dir_name = os.path.join(self.basedir, 'test_dir_remove')
        # create the dir before recording
        os.mkdir(dir_name)

        def remove_dir():
            """Action for the test."""
            os.rmdir(dir_name)

        events = yield self._perform_operations(self.basedir, self.mask,
                                                remove_dir, 1)
        event = events[0]
        self.assertTrue(event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_DELETE'] |
            common_tests.IS_DIR, event.mask)
        self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_file_write(self):
        """Test that the correct event is raised when a file is written."""
        file_name = os.path.join(self.basedir, 'test_file_write')
        # clean behind us by removing the file
        self.addCleanup(os.remove, file_name)
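        # the file does not exist yet: write_file below creates it while
        # the watch is recording, and the cleanup removes it afterwards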

        def write_file():
            """Action for the test."""
            # create the file and write to it during the recording
            fd = open(file_name, 'w')
            fd.write('test')
            fd.close()

        events = yield self._perform_operations(self.basedir, self.mask,
                                                write_file, 1)
        event = events[0]
        self.assertFalse(event.dir)
        self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask)
        self.assertEqual('IN_CREATE', event.maskname)
        self.assertEqual(os.path.split(file_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, file_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_file_moved_to_watched_dir_same_watcher(self):
        """Test that the correct event is raised when a file is moved."""
        from_file_name = os.path.join(
            self.basedir, 'test_file_moved_to_watched_dir_same_watcher')
        to_file_name = os.path.join(
            self.basedir, 'test_file_moved_to_watched_dir_same_watcher_2')
        # create the file before recording
        open(from_file_name, 'w').close()

        def move_file():
            """Action for the test."""
            os.rename(from_file_name, to_file_name)

        events = yield self._perform_operations(self.basedir, self.mask,
                                                move_file, 2)
        move_from_event = events[0]
        move_to_event = events[1]
        # first test the move from
        self.assertFalse(move_from_event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_MOVED_FROM'], move_from_event.mask)
        self.assertEqual('IN_MOVED_FROM', move_from_event.maskname)
        self.assertEqual(os.path.split(from_file_name)[1],
                         move_from_event.name)
        self.assertEqual('.', move_from_event.path)
        self.assertEqual(
            os.path.join(self.basedir, from_file_name),
            move_from_event.pathname)
        self.assertEqual(0, move_from_event.wd)
        # test the move to
        self.assertFalse(move_to_event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_MOVED_TO'], move_to_event.mask)
        self.assertEqual('IN_MOVED_TO', move_to_event.maskname)
        self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name)
        self.assertEqual('.', move_to_event.path)
        self.assertEqual(
            os.path.join(self.basedir, to_file_name), move_to_event.pathname)
        self.assertEqual(
            os.path.split(from_file_name)[1], move_to_event.src_pathname)
        self.assertEqual(0, move_to_event.wd)
        # assert that both cookies are the same
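        # (the cookie is what pairs a MOVED_FROM with its MOVED_TO,
        # mirroring pyinotify's behaviour)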
        self.assertEqual(move_from_event.cookie, move_to_event.cookie)

    @defer.inlineCallbacks
    def test_file_moved_to_not_watched_dir(self):
        """Test that the correct event is raised when a file is moved."""
        from_file_name = os.path.join(
            self.basedir, 'test_file_moved_to_not_watched_dir')
        open(from_file_name, 'w').close()

        def move_file():
            """Action for the test."""
            target = os.path.join(
                tempfile.mkdtemp(), 'test_file_moved_to_not_watched_dir')
            os.rename(from_file_name, target)

        # We need to test that we get a delete operation when moving
        # a file to an unwatched folder
        events = yield self._perform_operations(self.basedir, self.mask,
                                                move_file, 1)
        event = events[0]
        self.assertFalse(event.dir)
        self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'], event.mask)
        self.assertEqual('IN_DELETE', event.maskname)
        self.assertEqual(os.path.split(from_file_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, from_file_name),
                         event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_file_move_from_not_watched_dir(self):
        """Test that the correct event is raised when a file is moved."""
        from_file_name = os.path.join(
            tempfile.mkdtemp(), 'test_file_move_from_not_watched_dir')
        to_file_name = os.path.join(
            self.basedir, 'test_file_move_from_not_watched_dir')
        # create file before we record
        open(from_file_name, 'w').close()

        def move_files():
            """Action for the test."""
            os.rename(from_file_name, to_file_name)

        # We need to test that we get a create operation when moving
        # a file from an unwatched folder
        events = yield self._perform_operations(self.basedir, self.mask,
                                                move_files, 1)
        event = events[0]
        self.assertFalse(event.dir)
        self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask)
        self.assertEqual('IN_CREATE', event.maskname)
        self.assertEqual(os.path.split(to_file_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(
            os.path.join(self.basedir, to_file_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_dir_moved_to_watched_dir_same_watcher(self):
        """Test that the correct event is raised when a dir is moved."""
        from_dir_name = os.path.join(
            self.basedir, 'test_dir_moved_to_watched_dir_same_watcher')
        to_dir_name = os.path.join(
            self.basedir, 'test_dir_moved_to_watched_dir_same_watcher_2')
        os.mkdir(from_dir_name)

        def move_file():
            """Action for the test."""
            os.rename(from_dir_name, to_dir_name)

        events = yield self._perform_operations(
            self.basedir, self.mask, move_file, 2)
        move_from_event = events[0]
        move_to_event = events[1]
        # first test the move from
        self.assertTrue(move_from_event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_MOVED_FROM'] | common_tests.IS_DIR,
            move_from_event.mask)
        self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname)
        self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name)
        self.assertEqual('.', move_from_event.path)
        self.assertEqual(
            os.path.join(self.basedir, from_dir_name),
            move_from_event.pathname)
        self.assertEqual(0, move_from_event.wd)
        # test the move to
        self.assertTrue(move_to_event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_MOVED_TO'] | common_tests.IS_DIR,
            move_to_event.mask)
        self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname)
        self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name)
        self.assertEqual('.', move_to_event.path)
        self.assertEqual(
            os.path.join(self.basedir, to_dir_name), move_to_event.pathname)
        self.assertEqual(os.path.split(from_dir_name)[1],
                         move_to_event.src_pathname)
        self.assertEqual(0, move_to_event.wd)
        # assert that both cookies are the same
        self.assertEqual(move_from_event.cookie, move_to_event.cookie)

    @defer.inlineCallbacks
    def test_dir_moved_to_not_watched_dir(self):
        """Test that the correct event is raised when a file is moved."""
        dir_name = os.path.join(
            self.basedir, 'test_dir_moved_to_not_watched_dir')
        os.mkdir(dir_name)

        def move_dir():
            """Action for the test."""
            target = os.path.join(
                tempfile.mkdtemp(), 'test_dir_moved_to_not_watched_dir')
            os.rename(dir_name, target)

        # We need to test that we get a delete operation when moving
        # a dir to an unwatched folder
        events = yield self._perform_operations(self.basedir, self.mask,
                                                move_dir, 1)
        event = events[0]
        self.assertTrue(event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_DELETE'] | common_tests.IS_DIR,
            event.mask)
        self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname)
        self.assertEqual(0, event.wd)

    @defer.inlineCallbacks
    def test_dir_move_from_not_watched_dir(self):
        """Test that the correct event is raised when a file is moved."""
        from_dir_name = os.path.join(
            tempfile.mkdtemp(), 'test_dir_move_from_not_watched_dir')
        to_dir_name = os.path.join(
            self.basedir, 'test_dir_move_from_not_watched_dir')
        # create the dir before we record
        os.mkdir(from_dir_name)

        def move_dir():
            """Action for the test."""
            os.rename(from_dir_name, to_dir_name)

        events = yield self._perform_operations(self.basedir, self.mask,
                                                move_dir, 1)
        event = events[0]
        self.assertTrue(event.dir)
        self.assertEqual(
            common_tests.OP_FLAGS['IN_CREATE'] | common_tests.IS_DIR,
            event.mask)
        self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname)
        self.assertEqual(os.path.split(from_dir_name)[1], event.name)
        self.assertEqual('.', event.path)
        self.assertEqual(os.path.join(self.basedir, to_dir_name),
                         event.pathname)
        self.assertEqual(0, event.wd)

    def test_exclude_filter(self):
        """Test that the exclude filter works as expectd."""
        handler = TestCaseHandler(number_events=0)
        manager = WatchManager(handler)
        self.addCleanup(manager.stop)
        # add a watch that will always exclude all actions
        manager.add_watch(
            self.basedir, self.mask, exclude_filter=lambda x: True)
        # execute the actions
        file_name = os.path.join(self.basedir, 'test_file_create')
        open(file_name, 'w').close()
        # give some time for the system to get the events
        self.assertEqual(0, len(handler.processed_events))
    test_exclude_filter.skip = "we must rethink this test."

    def test_stream_created(self):
        """Test that the stream is created."""
        def fake_call(*args, **kwargs):
            """Fake call."""

        path = '/Users/username/folder/'
        watch = Watch(1, path, None)
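        # creating the Watch should wire the platform stream so that its
        # callback is the event processor and file events are enabled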
        self.assertEqual(
            watch.platform_watch._process_events,
            watch.platform_watch.stream.callback)
        self.assertEqual(watch.platform_watch.stream.paths, [path])
        self.assertEqual(watch.platform_watch.stream.file_events, True)

    def test_watching_property(self):
        """Test that the stopped property returns the stopped deferred."""
        path = '/Users/username/folder'
        watch = Watch(1, path, None)
        self.assertFalse(watch.watching)

    def random_error(self, *args):
        """Throw a fake exception."""
        raise common_tests.FakeException()

    def test_is_path_dir_missing_no_subdir(self):
        """Test when the path does not exist and is no a subdir."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        self.patch(os.path, 'exists', lambda path: False)
        watch = Watch(1, test_path, None)
        self.assertFalse(watch._path_is_dir(path))

    def test_is_path_dir_missing_in_subdir(self):
        """Test when the path does not exist and is a subdir."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        self.patch(os.path, 'exists', lambda path: False)
        watch = Watch(1, test_path, None)
        watch._subdirs.add(path)
        self.assertTrue(watch._path_is_dir(path))

    def test_is_path_dir_present_is_dir(self):
        """Test when the path is present and is dir."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        self.patch(os.path, 'exists', lambda path: True)
        self.patch(os.path, 'isdir', lambda path: True)
        watch = Watch(1, test_path, None)
        watch._subdirs.add(path)
        self.assertTrue(watch._path_is_dir(path))

    def test_is_path_dir_present_no_dir(self):
        """Test when the path is present but not a dir."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        self.patch(os.path, 'exists', lambda path: True)
        self.patch(os.path, 'isdir', lambda path: False)
        watch = Watch(1, test_path, None)
        watch._subdirs.add(path)
        self.assertFalse(watch._path_is_dir(path))

    def test_update_subdirs_create_not_present(self):
        """Test when we update on a create event and not present."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        watch = Watch(1, test_path, None)
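        # REVERSE_MACOS_ACTIONS presumably maps the inotify-style flag to
        # the native macOS action code that _update_subdirs receives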
        watch._update_subdirs(path, REVERSE_MACOS_ACTIONS[IN_CREATE])
        self.assertTrue(path in watch._subdirs)

    def test_update_subdirs_create_present(self):
        """Test when we update on a create event and is present."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        watch = Watch(1, test_path, None)
        watch._subdirs.add(path)
        old_length = len(watch._subdirs)
        watch._update_subdirs(path, REVERSE_MACOS_ACTIONS[IN_CREATE])
        self.assertTrue(path in watch._subdirs)
        self.assertEqual(old_length, len(watch._subdirs))

    def test_update_subdirs_delete_not_present(self):
        """Test when we delete and is not present."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        watch = Watch(1, test_path, None)
        watch._update_subdirs(path, REVERSE_MACOS_ACTIONS[IN_DELETE])
        self.assertTrue(path not in watch._subdirs)

    def test_update_subdirs_delete_present(self):
        """Test when we delete and is present."""
        path = '/Users/username/path/to/not/dir'
        test_path = self.mktemp("test_directory")
        watch = Watch(1, test_path, None)
        watch._subdirs.add(path)
        watch._update_subdirs(path, REVERSE_MACOS_ACTIONS[IN_DELETE])
        self.assertTrue(path not in watch._subdirs)


class OSWrapperTests(test_os_helper.OSWrapperTests):
    """Tests for os wrapper functions."""

    @defer.inlineCallbacks
    def setUp(self):
        """Set up."""
        yield super(OSWrapperTests, self).setUp()
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        self._logger = logging.getLogger('ubuntuone.SyncDaemon')
        self._logger.addHandler(self.handler)
        self.addCleanup(self._logger.removeHandler, self.handler)

    def test_stat_symlink(self):
        """Test that it doesn't follow symlinks.

        We compare the inode only (enough to see if it's returning info
        from the link or the linked file), as we cannot compare the full
        stat because the st_mode will be different.
        """
        link = os.path.join(self.basedir, 'foo')
        os.symlink(self.testfile, link)
        self.assertNotEqual(os.stat(link).st_ino, stat_path(link).st_ino)
        self.assertEqual(os.lstat(link).st_ino, stat_path(link).st_ino)

    def test_movetotrash_file_bad(self):
        """Something bad happen when moving to trash, removed anyway."""
        FakeGIOFile._bad_trash_call = False   # error
        self.patch(gio, "File", FakeGIOFile)
        path = os.path.join(self.basedir, 'foo')
        open_file(path, 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo"))

    def test_movetotrash_dir_bad(self):
        """Something bad happen when moving to trash, removed anyway."""
        FakeGIOFile._bad_trash_call = False   # error
        self.patch(gio, "File", FakeGIOFile)
        path = os.path.join(self.basedir, 'foo')
        os.mkdir(path)
        open_file(os.path.join(path, 'file inside directory'), 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo"))

    def test_movetotrash_file_systemnotcapable(self):
        """The system is not capable of moving into trash."""
        FakeGIOFile._bad_trash_call = GIO_NOT_SUPPORTED
        self.patch(gio, "File", FakeGIOFile)
        path = os.path.join(self.basedir, 'foo')
        open_file(path, 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo",
                                                   "ERROR_NOT_SUPPORTED"))

    def test_movetotrash_dir_systemnotcapable(self):
        """The system is not capable of moving into trash."""
        FakeGIOFile._bad_trash_call = GIO_NOT_SUPPORTED
        self.patch(gio, "File", FakeGIOFile)
        path = os.path.join(self.basedir, 'foo')
        os.mkdir(path)
        open_file(os.path.join(path, 'file inside directory'), 'w').close()
        move_to_trash(path)
        self.assertFalse(os.path.exists(path))
        self.assertTrue(self.handler.check_warning("Problems moving to trash!",
                                                   "Removing anyway", "foo",
                                                   "ERROR_NOT_SUPPORTED"))


class StatsWorkerTestCase(TestCase):
    """Tests for StatsWorker class."""

    def setUp(self):
        super(StatsWorkerTestCase, self).setUp()
        self.mocker = Mocker()
        self.rpc = self.mocker.mock()
        self.worker = stats_worker.StatsWorker(10, '', self.rpc)

        # logging setup
        self.handler = MementoHandler()
        self.worker.logger.addHandler(self.handler)
        self.addCleanup(self.worker.logger.removeHandler, self.handler)
        self.worker.logger.setLevel(logging.DEBUG)
        self.handler.setLevel(logging.DEBUG)
        self.worker.logger.propagate = False
        self.handler.debug = True

    def test_collect_stats(self):
        """Test the collect_stats method."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
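        # list.append returns None, so the `or {}` makes each fake
        # collector return an empty dict while recording that it was called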
        processes = [dict(name="bar", group="foo", pid="42", state=RUNNING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine', ('proc', 42, 'bar')])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(
            self.handler.check_info("Collecting stats for proc", "pid=42",
                                    "name=bar"))

    def test_collect_stats_not_running(self):
        """Test the collect_stats method if the proccess isn't running."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        processes = [dict(name="bar", group="foo", pid="42", state=STARTING)]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))
        self.assertTrue(
            self.handler.check_info("Ignoring process", "pid=42", "name=bar",
                                    "state=%s" % STARTING))

    def test_collect_stats_no_data(self):
        """Test the collect_stats method with no data of a process."""
        called = []
        self.worker._collect_process = \
            lambda p, n: called.append(('proc', p, n)) or {}
        self.worker._collect_machine = lambda: called.append('machine') or {}
        expect(self.rpc.supervisor.getAllProcessInfo()).result([])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(called, ['machine'])
        self.assertTrue(self.handler.check_info("Collecting machine stats"))

    def test_collect_process_info_new_report(self):
        """Check how the process info is collected first time."""
        mocker = Mocker()
        assert not self.worker.process_cache

        # patch Process to return our mock for the test pid
        Process = mocker.mock()
        self.patch(stats_worker.psutil, 'Process', Process)
        proc = mocker.mock()
        pid = 1234
        expect(Process(pid)).result(proc)

        # patch ProcessReport to return our mock for the given proc
        ProcessReport = mocker.mock()
        self.patch(stats_worker, 'ProcessReport', ProcessReport)
        proc_report = mocker.mock()
        expect(ProcessReport(proc)).result(proc_report)

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)

    def test_collect_process_info_old_report(self):
        """Check how the process info is collected when cached."""
        mocker = Mocker()

        # put it in the cache
        pid = 1234
        proc_report = mocker.mock()
        self.worker.process_cache[pid] = proc_report

        # expect to get called with some info, return some results
        name = 'test_proc'
        result = object()
        expect(proc_report.get_memory_and_cpu(prefix=name)).result(result)

        with mocker:
            real = self.worker._collect_process(pid, name)
        self.assertIdentical(real, result)

    def test_collect_system_info(self):
        """Check how the system info is collected."""
        mocker = Mocker()

        # change the constant to ensure it's used as we want
        result1 = dict(a=3, b=5)
        result2 = dict(c=7)
        fake = (lambda: result1, lambda: result2)
        self.patch(stats_worker, 'SYSTEM_STATS', fake)
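        # _collect_machine should call every collector in SYSTEM_STATS and
        # merge the resulting dicts, which is what `should` checks below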

        with mocker:
            result = self.worker._collect_machine()

        should = {}
        should.update(result1)
        should.update(result2)
        self.assertEqual(result, should)

    def test_informed_metrics(self):
        """Check how stats are reported."""
        # prepare a lot of fake info that will be "collected"
        machine_info = dict(foo=3, bar=5)
        process_info = {
            1: dict(some=1234, other=4567),
            2: dict(some=9876, other=6543),
        }
        self.worker._collect_process = lambda pid, name: process_info[pid]
        self.worker._collect_machine = lambda: machine_info
        processes = [
            dict(name="proc1", group="", pid="1", state=RUNNING),
            dict(name="proc2", group="", pid="2", state=RUNNING),
        ]
        expect(self.rpc.supervisor.getAllProcessInfo()).result(processes)

        # patch the metric reporter to see what is sent
        reported = set()
        self.worker.metrics.gauge = lambda *a: reported.add(a)
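        # gauge() normally reports a metric; recording its (name, value)
        # arguments lets the test compare exactly what gets sent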

        # what we should get is...
        should = set([
            ('foo', 3),
            ('bar', 5),
            ('some', 1234),
            ('other', 4567),
            ('some', 9876),
            ('other', 6543),
        ])
        with self.mocker:
            self.worker.collect_stats()
        self.assertEqual(reported, should)