Example #1
    def test_hash_cleanup_listdir(self):
        file_list = []

        def mock_listdir(path):
            return list(file_list)

        def mock_unlink(path):
            file_list.remove(os.path.basename(path))

        with unit_mock({"os.listdir": mock_listdir, "os.unlink": mock_unlink}):
            # purge .data if there's a newer .ts
            file1 = normalize_timestamp(time()) + ".data"
            file2 = normalize_timestamp(time() + 1) + ".ts"
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir("/whatever"), [file2])

            # purge .ts if there's a newer .data
            file1 = normalize_timestamp(time()) + ".ts"
            file2 = normalize_timestamp(time() + 1) + ".data"
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir("/whatever"), [file2])

            # keep .meta and .data if meta newer than data
            file1 = normalize_timestamp(time()) + ".ts"
            file2 = normalize_timestamp(time() + 1) + ".data"
            file3 = normalize_timestamp(time() + 2) + ".meta"
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir("/whatever"), [file3, file2])

            # keep only latest of multiple .ts files
            file1 = normalize_timestamp(time()) + ".ts"
            file2 = normalize_timestamp(time() + 1) + ".ts"
            file3 = normalize_timestamp(time() + 2) + ".ts"
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir("/whatever"), [file3])
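The comments above spell out the cleanup policy that hash_cleanup_listdir is expected to enforce. The standalone sketch below encodes just those four asserted cases; it is a hypothetical reimplementation for illustration, not Swift's actual diskfile.hash_cleanup_listdir, which handles more edge cases.

    import os

    def cleanup_policy_sketch(hsh_path, listdir=os.listdir, unlink=os.unlink):
        # normalize_timestamp() produces fixed-width names, so a lexical sort
        # is also a chronological sort; walk the directory newest-first.
        files = sorted(listdir(hsh_path), reverse=True)
        keep = []
        for fname in files:
            if not keep and fname.endswith('.meta'):
                keep.append(fname)        # newest file is a .meta: keep it ...
            elif (fname.endswith(('.data', '.ts')) and
                    not any(k.endswith(('.data', '.ts')) for k in keep)):
                keep.append(fname)        # ... plus the newest .data or .ts
            else:
                unlink(os.path.join(hsh_path, fname))  # everything older goes
        return keep

Run against the four file_list fixtures above, this returns [file2], [file2], [file3, file2] and [file3] respectively, matching the assertions.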
Example #2
    def test_get_hashes_unmodified(self):
        df = diskfile.DiskFile(self.devices, "sda", "0", "a", "c", "o", FakeLogger())
        mkdirs(df.datadir)
        with open(os.path.join(df.datadir, normalize_timestamp(time()) + ".ts"), "wb") as f:
            f.write("1234567890")
        part = os.path.join(self.objects, "0")
        hashed, hashes = diskfile.get_hashes(part)
        i = [0]

        def _getmtime(filename):
            i[0] += 1
            return 1

        with unit_mock({"swift.obj.diskfile.getmtime": _getmtime}):
            hashed, hashes = diskfile.get_hashes(part, recalculate=["a83"])
        self.assertEquals(i[0], 2)
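unit_mock here comes from Swift's test/unit helpers and is used throughout these examples to patch dotted names such as swift.obj.diskfile.getmtime for the duration of a with block. A minimal sketch of how such a context manager could look is below; the name unit_mock_sketch and its restore logic are assumptions, not the actual helper.

    import importlib
    from contextlib import contextmanager

    @contextmanager
    def unit_mock_sketch(mocks):
        # mocks maps dotted attribute paths (e.g. 'os.listdir') to replacements.
        originals = []
        try:
            for dotted, replacement in mocks.items():
                mod_path, attr = dotted.rsplit('.', 1)
                module = importlib.import_module(mod_path)
                originals.append((module, attr, getattr(module, attr)))
                setattr(module, attr, replacement)
            yield
        finally:
            # Put every patched attribute back, even if the block raised.
            for module, attr, original in originals:
                setattr(module, attr, original)

The standard library's mock.patch (unittest.mock in Python 3) provides the same behaviour and is the usual choice outside this test suite.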
Example #3
    def test_get_hashes_unmodified(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        with open(
                os.path.join(df._datadir,
                             normalize_timestamp(time()) + '.ts'), 'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = diskfile.get_hashes(part)
        i = [0]

        def _getmtime(filename):
            i[0] += 1
            return 1

        with unit_mock({'swift.obj.diskfile.getmtime': _getmtime}):
            hashed, hashes = diskfile.get_hashes(part, recalculate=['a83'])
        self.assertEquals(i[0], 2)
Example #4
    def test_get_hashes_unmodified(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        with open(
                os.path.join(df._datadir,
                             normalize_timestamp(time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = diskfile.get_hashes(part)
        i = [0]

        def _getmtime(filename):
            i[0] += 1
            return 1
        with unit_mock({'swift.obj.diskfile.getmtime': _getmtime}):
            hashed, hashes = diskfile.get_hashes(
                part, recalculate=['a83'])
        self.assertEquals(i[0], 2)
Example #5
    def test_get_hashes_unmodified(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(
                os.path.join(df.datadir,
                             normalize_timestamp(time.time()) + '.ts'),
                'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_base.get_hashes(part)
        i = [0]

        def getmtime(filename):
            i[0] += 1
            return 1

        with unit_mock({'os.path.getmtime': getmtime}):
            hashed, hashes = object_base.get_hashes(part, recalculate=['a83'])
        self.assertEquals(i[0], 2)
Example #6
    def test_get_hashes_unmodified_and_zero_bytes(self):
        df = diskfile.DiskFile(self.devices, "sda", "0", "a", "c", "o", FakeLogger())
        mkdirs(df.datadir)
        part = os.path.join(self.objects, "0")
        open(os.path.join(part, diskfile.HASH_FILE), "w")
        # Now the hash file is zero bytes.
        i = [0]

        def _getmtime(filename):
            i[0] += 1
            return 1

        with unit_mock({"swift.obj.diskfile.getmtime": _getmtime}):
            hashed, hashes = diskfile.get_hashes(part, recalculate=[])
        # getmtime will actually not get called.  Initially, the pickle.load
        # will raise an exception first and later, force_rewrite will
        # short-circuit the if clause to determine whether to write out a
        # fresh hashes_file.
        self.assertEquals(i[0], 0)
        self.assertTrue("a83" in hashes)
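The comment above relies on pickle.load() failing on a zero-byte hashes file, which is what sends get_hashes down the force_rewrite path before getmtime is ever consulted. A quick standalone check of that behaviour, independent of Swift:

    import pickle
    import tempfile

    with tempfile.TemporaryFile() as f:
        # f is a zero-byte file, just like the freshly opened HASH_FILE above.
        try:
            pickle.load(f)
            raise AssertionError('expected pickle.load to fail on empty input')
        except EOFError:
            pass  # this exception is what triggers the fresh hashes_file rewrite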
Example #7
    def test_get_hashes_unmodified_and_zero_bytes(self):
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o')
        mkdirs(df._datadir)
        part = os.path.join(self.objects, '0')
        open(os.path.join(part, diskfile.HASH_FILE), 'w')
        # Now the hash file is zero bytes.
        i = [0]

        def _getmtime(filename):
            i[0] += 1
            return 1
        with unit_mock({'swift.obj.diskfile.getmtime': _getmtime}):
            hashed, hashes = diskfile.get_hashes(
                part, recalculate=[])
        # getmtime will actually not get called.  Initially, the pickle.load
        # will raise an exception first and later, force_rewrite will
        # short-circuit the if clause to determine whether to write out a
        # fresh hashes_file.
        self.assertEquals(i[0], 0)
        self.assertTrue('a83' in hashes)
Example #8
    def test_get_hashes_unmodified_and_zero_bytes(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        part = os.path.join(self.objects, '0')
        open(os.path.join(part, object_base.HASH_FILE), 'w')
        # Now the hash file is zero bytes.
        i = [0]

        def getmtime(filename):
            i[0] += 1
            return 1

        with unit_mock({'os.path.getmtime': getmtime}):
            hashed, hashes = object_base.get_hashes(part, recalculate=[])
        # getmtime will actually not get called.  Initially, the pickle.load
        # will raise an exception first and later, force_rewrite will
        # short-circuit the if clause to determine whether to write out a fresh
        # hashes_file.
        self.assertEquals(i[0], 0)
        self.assertTrue('a83' in hashes)
Example #9
    def test_hash_cleanup_listdir(self):
        file_list = []

        def mock_listdir(path):
            return list(file_list)

        def mock_unlink(path):
            file_list.remove(os.path.basename(path))

        with unit_mock({'os.listdir': mock_listdir, 'os.unlink': mock_unlink}):
            # purge .data if there's a newer .ts
            file1 = normalize_timestamp(time()) + '.data'
            file2 = normalize_timestamp(time() + 1) + '.ts'
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file2])

            # purge .ts if there's a newer .data
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.data'
            file_list = [file1, file2]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file2])

            # keep .meta and .data if meta newer than data
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.data'
            file3 = normalize_timestamp(time() + 2) + '.meta'
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file3, file2])

            # keep only latest of multiple .ts files
            file1 = normalize_timestamp(time()) + '.ts'
            file2 = normalize_timestamp(time() + 1) + '.ts'
            file3 = normalize_timestamp(time() + 2) + '.ts'
            file_list = [file1, file2, file3]
            self.assertEquals(diskfile.hash_cleanup_listdir('/whatever'),
                              [file3])