    def test_called_back_error(self):
        """Test that the hasher signals an error when the file is missing."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
                d.callback((event, kwargs))
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)

        # send what to hash
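        # a work item is a ((path, mdid), timestamp) tuple; FAKE_TIMESTAMP is
        # assumed to be a fixed timestamp defined elsewhere in this module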
        item = (("not_to_be_found", "foo"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        def check_info(result):
            """Check the info pushed by the hasher."""
            event, kwargs = result
            hasher.stop()
            hasher.join(timeout=5)
            self.assertEqual(event, "HQ_HASH_ERROR")
            self.assertEqual(kwargs['mdid'], "foo")

        d.addCallback(check_info)
        # release the processor and check
        return d

    @defer.inlineCallbacks
    def test_open_file_with_rb(self):
        """Check that the file to hash is opened with 'rb' mode."""
        called = []

        orig = hash_queue.open_file
        def faked_open_file(*a):
            """Record the call arguments, then call the real open_file."""
            called.append(a)
            return orig(*a)

        self.patch(hash_queue, 'open_file', faked_open_file)

        queue = hash_queue.UniqueQueue()
        testfile = os.path.join(self.test_dir, "testfile")
        with open_file(testfile, "wb") as fh:
            fh.write("foobar")
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        d = defer.Deferred()
        eq = FakeEventQueue(d)
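        # FakeEventQueue is a test helper defined elsewhere in this module;
        # presumably it fires the deferred once the hasher pushes its event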

        hasher = hash_queue._Hasher(queue=queue, end_mark='end-mark',
                                    event_queue=eq)
        # start the hasher after putting the work items
        hasher.start()

        yield d
        hasher.stop()

        self.assertEqual(called, [(testfile, 'rb')])

    def test_unique(self):
        """Items repeated in the queue should be hashed only once."""
        # calculate what we should receive
        should_be = []
        for i in range(10):
            hasher = content_hash_factory()
            text = "supercalifragilistico"+str(i)
            hasher.hash_object.update(text)
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            with open_file(tfile, "wb") as fh:
                fh.write("supercalifragilistico"+str(i))
            d = dict(path=tfile, hash=hasher.content_hash(),
                     crc32=crc32(text), size=len(text), stat=stat_path(tfile))
            should_be.append(("HQ_HASH_NEW", d))

        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def __init__(innerself):
                innerself.store = []
            def push(innerself, event, **kwargs):
                """Callback."""
                innerself.store.append((event, kwargs))
                if len(innerself.store) == 10:
                    if innerself.store == should_be:
                        d.callback(True)
                    else:
                        d.errback(Exception("are different!"))

        receiver = Helper()

        hq = hash_queue.HashQueue(receiver)
        self.addCleanup(hq.shutdown)
        # stop the hasher so we can test the unique items in the queue
        hq.hasher.stop()
        self.log.debug('Hasher stopped (forced)')
        # allow the hasher to fully stop
        time.sleep(0.1)
        # create a new hasher just like the HashQueue creates it
        hq.hasher = hash_queue._Hasher(hq._queue, hq._end_mark, receiver)
        hq.hasher.setDaemon(True)
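        # daemon threads don't block interpreter shutdown, matching how
        # HashQueue sets up its own hasher thread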

        # send each file to be hashed twice
        for i in range(10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            hq.insert(tfile, "mdid")
            hq.insert(tfile, "mdid")
        # start the hasher
        self.log.debug('Hasher started (forced)')
        hq.hasher.start()
        # insert the last item to check the uniqueness in the queue while
        # the hasher is running
        for i in range(9, 10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            hq.insert(tfile, "mdid")
        return d

    def test_order(self):
        """The hasher should return in order."""
        # calculate what we should receive
        should_be = []
        for i in range(10):
            hasher = content_hash_factory()
            text = "supercalifragilistico"+str(i)
            hasher.hash_object.update(text)
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            with open_file(tfile, "wb") as fh:
                fh.write("supercalifragilistico"+str(i))
            d = dict(path=tfile, hash=hasher.content_hash(),
                     crc32=crc32(text), size=len(text), stat=stat_path(tfile))
            should_be.append(("HQ_HASH_NEW", d))

        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()

        class Helper(object):
            """Helper class."""
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def __init__(innerself):
                innerself.store = []
            def push(innerself, event, **kwargs):
                """Callback."""
                innerself.store.append((event, kwargs))
                if len(innerself.store) == 10:
                    hasher.stop()
                    hasher.join(timeout=5)
                    if innerself.store == should_be:
                        d.callback(True)
                    else:
                        d.errback(Exception("are different!"))
        receiver = Helper()

        hasher = hash_queue._Hasher(queue, mark, receiver)

        # send what to hash
        for i in range(10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            item = ((tfile, "mdid"), FAKE_TIMESTAMP)
            queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        return d

    def test_called_back_ok(self):
        """Test that the hasher produces correct info."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
                d.callback((event, kwargs))
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)

        # send what to hash
        testfile = os.path.join(self.test_dir, "testfile")
        with open_file(testfile, "wb") as fh:
            fh.write("foobar")
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        def check_info(result):
            """Check the info pushed by the hasher."""
            event, kwargs = result
            hasher.stop()
            hasher.join(timeout=5)
            self.assertEqual(event, "HQ_HASH_NEW")
            # calculate what we should receive
            realh = content_hash_factory()
            realh.hash_object.update("foobar")
            should_be = realh.content_hash()
            curr_stat = stat_path(testfile)
            self.assertEqual(should_be, kwargs['hash'])
            for attr in ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid',
                         'st_gid', 'st_size', 'st_ctime', 'st_mtime'):
                self.assertEqual(getattr(curr_stat, attr),
                                 getattr(kwargs['stat'], attr))

        d.addCallback(check_info)
        # release the processor and check
        return d

    @defer.inlineCallbacks
    def test_called_back_log_ok(self):
        """Test that the hasher logs the pushed info."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
                d.callback(kwargs)
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)

        # log config
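        # MementoHandler keeps every emitted record in .records, so the log
        # output can be inspected after the hasher runs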
        handler = MementoHandler()
        handler.setLevel(logging.DEBUG)
        hasher.logger.addHandler(handler)

        # send what to hash
        testfile = os.path.join(self.test_dir, "testfile")
        with open_file(testfile, "wb") as fh:
            fh.write("foobar")
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        # wait event and stop hasher
        yield d
        hasher.stop()
        hasher.join(timeout=5)

        # check log
        log_msg = [r.message for r in handler.records
                   if "path hash pushed" in r.message][0]
        self.assertTrue("path" in log_msg)
        self.assertTrue("hash" in log_msg)
        self.assertTrue("crc" in log_msg)
        self.assertTrue("size" in log_msg)
        self.assertTrue("st_ino" in log_msg)
        self.assertTrue("st_size" in log_msg)
        self.assertTrue("st_mtime" in log_msg)
        hasher.logger.removeHandler(handler)

    def test_large_content(self):
        """The hasher handles a large file correctly."""
        # calculate what we should receive
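        # 100,000 random bytes: large enough that the hasher most likely
        # reads the file in several chunks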
        testinfo = "".join(chr(random.randint(0, 255)) for i in range(100000))
        hasher = content_hash_factory()
        hasher.hash_object.update(testinfo)
        testfile = os.path.join(self.test_dir, "testfile")
        testhash = hasher.content_hash()

        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()

        d = defer.Deferred()

        class Helper(object):
            """Helper class."""
            def push(self, event, path, hash, crc32, size, stat):
                """callback"""
                hasher.stop()
                hasher.join(timeout=5)
                if event != "HQ_HASH_NEW":
                    d.errback(Exception("envent is not HQ_HASH_NEW"))
                elif path != testfile:
                    d.errback(Exception("path is not the original one"))
                elif hash != testhash:
                    d.errback(Exception("the hashes are different!"))
                else:
                    d.callback(True)
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)
        # send what to hash
        with open_file(testfile, "wb") as fh:
            fh.write(testinfo)
        item = ((testfile, "mdid"), FAKE_TIMESTAMP)
        queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        return d

    def setUpWithCount(self, expected_events=1):
        """Initialize this test instance."""
        self.test_dir = self.mktemp('test_dir')
        self.fake_time = FakeTimeModule()
        self.patch(hash_queue, "time", self.fake_time)
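        # FakeTimeModule is a test double defined elsewhere; patching
        # hash_queue.time presumably keeps any time-based waits in the
        # hasher under the test's control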
        self.queue = hash_queue.UniqueQueue()

        self.testfile = os.path.join(self.test_dir, "testfile")
        with open_file(self.testfile, "wb") as fh:
            fh.write("foobar")

        self.event_d = defer.Deferred()
        self.eq = FakeEventQueue(self.event_d, expected_events)
        self.hasher = hash_queue._Hasher(queue=self.queue,
                                         end_mark='end-mark',
                                         event_queue=self.eq)

        def stop_hasher():
            """Safely stop the hasher."""
            self.hasher.stop()
            self.hasher.join(timeout=5)
            self.assertFalse(self.hasher.isAlive())

        self.addCleanup(stop_hasher)

    def test_live_process(self):
        """Check that the hasher lives and dies."""
        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        class Helper(object):
            """Helper class."""
            def push(self, event, **kwargs):
                """Callback."""
        receiver = Helper()
        hasher = hash_queue._Hasher(queue, mark, receiver)
        hasher.start()

        # it's aliveeeeeeee!
        self.assertTrue(hasher.isAlive())

        # stop it and wait for the thread to actually finish
        hasher.stop()
        hasher.join(timeout=5)

        # "I see dead threads"
        self.assertFalse(hasher.isAlive())