Example #1
    def test_invalidate_hash(self):

        def assertFileData(file_path, data):
            # Read the pickled hashes file back and compare its contents
            with open(file_path, 'rb') as fp:
                fdata = fp.read()
                self.assertEqual(pickle.loads(fdata), pickle.loads(data))

        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]  # suffix dir is the last 3 chars of the hash
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hashes_file = os.path.join(self.objects, '0',
                                   object_replicator.HASH_FILE)
        # verify that the exception from a nonexistent hashes file is caught
        self.assertIsNone(object_replicator.invalidate_hash(whole_path_from))
        # test that hashes get cleared
        check_pickle_data = pickle.dumps({data_dir: None},
                                         object_replicator.PICKLE_PROTOCOL)
        for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
            with open(hashes_file, 'wb') as fp:
                pickle.dump(data_hash, fp, object_replicator.PICKLE_PROTOCOL)
            object_replicator.invalidate_hash(whole_path_from)
            assertFileData(hashes_file, check_pickle_data)
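
For context, here is a minimal sketch of what object_replicator.invalidate_hash plausibly does, reconstructed from the assertions above. The constant values mirror the HASH_FILE and PICKLE_PROTOCOL attributes the test references; the real implementation also takes a lock on the partition directory, which is omitted here:

    import os
    import pickle

    HASH_FILE = 'hashes.pkl'  # mirrors object_replicator.HASH_FILE
    PICKLE_PROTOCOL = 2       # mirrors object_replicator.PICKLE_PROTOCOL

    def invalidate_hash(suffix_dir):
        """Mark a suffix's cached hash as stale by setting it to None."""
        suffix = os.path.basename(suffix_dir)
        partition_dir = os.path.dirname(suffix_dir)
        hashes_file = os.path.join(partition_dir, HASH_FILE)
        try:
            with open(hashes_file, 'rb') as fp:
                hashes = pickle.load(fp)
            # Already invalidated: leave the file untouched.
            if suffix in hashes and hashes[suffix] is None:
                return
        except (IOError, OSError, EOFError):
            # No readable hashes file yet -- nothing to invalidate.
            return
        hashes[suffix] = None
        with open(hashes_file, 'wb') as fp:
            pickle.dump(hashes, fp, PICKLE_PROTOCOL)

This matches both loop cases in the test: a suffix already mapped to None is left alone, and a stale hash value is overwritten with None.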
Example #2
    def test_object_audit_no_meta(self):
        # Create a .data file with no xattr metadata at all
        timestamp = str(normalize_timestamp(time.time()))
        path = os.path.join(self.disk_file.datadir, timestamp + '.data')
        mkdirs(self.disk_file.datadir)
        with open(path, 'w') as fp:
            fp.write('0' * 1024)
        invalidate_hash(os.path.dirname(self.disk_file.datadir))
        self.auditor = auditor.AuditorWorker(self.conf)
        pre_quarantines = self.auditor.quarantines
        self.auditor.object_audit(
            os.path.join(self.disk_file.datadir, timestamp + '.data'),
            'sda', '0')
        # A data file without metadata must be quarantined
        self.assertEqual(self.auditor.quarantines, pre_quarantines + 1)
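
The quarantine counter increases because the .data file was created with a plain open()/write() and therefore carries no pickled metadata in its extended attributes. A minimal sketch of the failing check, assuming a read_metadata helper built on the third-party xattr package (the helper and audit_data_file names are illustrative, not Swift's exact API):

    import pickle
    import xattr  # third-party 'xattr' package

    METADATA_KEY = 'user.swift.metadata'  # xattr key Swift-era code uses

    def read_metadata(path):
        """Load pickled object metadata from the file's extended attributes."""
        return pickle.loads(xattr.getxattr(path, METADATA_KEY))

    def audit_data_file(path):
        """Return True if metadata is readable, False to signal quarantine."""
        try:
            metadata = read_metadata(path)
        except (IOError, OSError):
            # No xattr metadata at all, as in test_object_audit_no_meta.
            return False
        return 'name' in metadata and 'X-Timestamp' in metadata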
Example #3
    def put(self, fd, tmppath, metadata, extension='.data'):
        """
        Finalize writing the file to disk and rename it from the temp file to
        its real location.  This should be called after the data has been
        written to the temp file.

        :param fd: file descriptor of the temp file
        :param tmppath: path to the temporary file being used
        :param metadata: dictionary of metadata to be written
        :param extension: extension to be used when making the file
        """
        metadata['name'] = self.name
        timestamp = normalize_timestamp(metadata['X-Timestamp'])
        write_metadata(fd, metadata)
        if 'Content-Length' in metadata:
            self.drop_cache(fd, 0, int(metadata['Content-Length']))
        # fsync in a real thread so the eventlet hub is not blocked
        tpool.execute(os.fsync, fd)
        # mark the suffix directory's cached hash stale before the rename
        invalidate_hash(os.path.dirname(self.datadir))
        renamer(tmppath, os.path.join(self.datadir, timestamp + extension))
        self.metadata = metadata
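
A hedged usage sketch of put(): per the docstring, the caller first writes the object body to a temp file, then hands the descriptor and path over. The mkstemp setup and the tmpdir attribute on the DiskFile are assumptions for illustration:

    import os
    import tempfile
    import time

    # Hypothetical caller; assumes disk_file is a DiskFile like the ones
    # above and that it exposes a tmpdir attribute for temp files.
    body = b'hello world'
    fd, tmppath = tempfile.mkstemp(dir=disk_file.tmpdir)
    os.write(fd, body)
    metadata = {
        'X-Timestamp': str(time.time()),
        'Content-Length': str(len(body)),
    }
    disk_file.put(fd, tmppath, metadata)  # fsyncs, invalidates hash, renames
    os.close(fd)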