Example #1
 def setUp(self):
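     # Proxy Application wired to in-memory fakes (memcache, rings, logger)
     # so no real cluster is needed for these tests.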
     self.app = proxy_server.Application(None, FakeMemcache(),
                                         account_ring=FakeAccountRing(''),
                                         container_ring=FakeContainerRing(''),
                                         object_ring=FakeObjectRing(''),
                                         logger=FakeLogger())
     self.base = Controller(self.app)
Example #2
 def setUp(self):
     self.tmpdir = mkdtemp()
     self.testdir = os.path.join(self.tmpdir,
                                 'test_object_server_disk_file_mgr')
     mkdirs(os.path.join(self.testdir, "export", "fs1"))
     mkdirs(os.path.join(self.testdir, "export", "fs1"))
     self.filesystems = os.path.join(self.testdir, "export")
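     # Make tpool.execute run the wrapped call synchronously so the tests stay
     # single-threaded; the original callable is saved for later restoration.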
     self._orig_tpool_exc = tpool.execute
     tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
     self.conf = dict(filesystems=self.filesystems,
                      mount_check='false',
                      keep_cache_size=2 * 1024)
     self.df_mgr = diskfile.DiskFileManager(self.conf, FakeLogger())
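The saved _orig_tpool_exc and the temporary directory suggest a matching tearDown; a minimal sketch, assuming rmtree comes from shutil (the import is not shown above):

 def tearDown(self):
     # Restore the real tpool.execute and remove the scratch directory.
     tpool.execute = self._orig_tpool_exc
     rmtree(self.tmpdir, ignore_errors=True)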
Example #3
    def setUp(self):
        class FakeApp:
            def __init__(self):
                pass

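        # Bandwidth-control settings handed to BWController below.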
        conf = {
            'bandwidth_controlled_unit_time_period': 60,
            'bandwidth_control': 'Enable',
            'bandwidth_controlled_account_limit': 1024,
            'default_max_bandwidth_read_limit': 100,
            'default_max_bandwidth_write_limit': 100,
        }
        app = FakeApp()
        self.controller = BWController(app, conf, FakeLogger(''))
Example #4
 def setUp(self):
     """Set up for testing osd.object.server.ObjectController"""
     utils.HASH_PATH_SUFFIX = 'endcap'
     utils.HASH_PATH_PREFIX = 'startcap'
     self.testdir = \
         os.path.join(mkdtemp(), 'tmp_test_object_server_ObjectController')
     self.filesystems = os.path.join(self.testdir, "export")
     self.filesystem = "fs1"
     self.dir = "o1"
     mkdirs(os.path.join(self.filesystems, self.filesystem))
     mkdirs(os.path.join(self.filesystems, self.filesystem))
     conf = {'filesystems': self.filesystems, 'mount_check': 'false'}
     self.object_controller = self.create_instance(conf)
     self.object_controller.bytes_per_sync = 1
     self._orig_tpool_exc = tpool.execute
     tpool.execute = lambda f, *args, **kwargs: f(*args, **kwargs)
     self.df_mgr = diskfile.DiskFileManager(conf,
                                            self.object_controller.logger)
     self.lock_mgr_obj = LockManager(FakeLogger(''))
Example #5
 def __get_account_updater_instance(self):
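     # Lazily create a single shared AccountUpdater and reuse it on later calls.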
     if TestContainerUpdater.ACC_CALLED:
         return TestContainerUpdater.ACC_UPDATER
     else:
         TestContainerUpdater.ACC_UPDATER = account_updater.AccountUpdater(
             {'interval': '1',
              'node_timeout': '5',
              'conn_timeout': '.5'},
             logger=FakeLogger())

         TestContainerUpdater.ACC_CALLED = True
         return TestContainerUpdater.ACC_UPDATER
Example #6
 def setUp(self):
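     # Build a small bandwidth-info cache and seed it with an initial key.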
     self.cache = BWInfoCache(2, 123, 60, FakeLogger())
     self.cache.add_new_key('a', 10, 10)
Example #7
    def _get_open_disk_file(self,
                            invalid_type=None,
                            obj_name='o',
                            fsize=1024,
                            csize=8,
                            mark_deleted=False,
                            prealloc=False,
                            ts=None,
                            mount_check=False,
                            extra_metadata=None):
        '''Return an opened DiskFile, optionally corrupted per invalid_type.'''
        df = self._simple_get_diskfile(obj=obj_name)
        data = '0' * fsize
        etag = md5()
        if ts is not None:
            timestamp = ts
        else:
            timestamp = normalize_timestamp(time())
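        # Write fsize zero-bytes through the DiskFile writer, tracking the MD5
        # so the stored ETag matches the payload.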
        with df.create() as writer:
            upload_size = writer.write(data)
            etag.update(data)
            etag = etag.hexdigest()
            metadata = {
                'ETag': etag,
                'X-Timestamp': timestamp,
                'Content-Length': str(upload_size),
            }
            metadata.update(extra_metadata or {})
            writer.put(metadata)
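            # Optionally corrupt the stored metadata to simulate a specific
            # on-disk failure mode.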
            if invalid_type == 'ETag':
                etag = md5()
                etag.update('1' + '0' * (fsize - 1))
                etag = etag.hexdigest()
                metadata['ETag'] = etag
                diskfile.write_metadata(writer._fd_meta, metadata)
            elif invalid_type == 'Content-Length':
                metadata['Content-Length'] = fsize - 1
                diskfile.write_metadata(writer._fd_meta, metadata)
            elif invalid_type == 'Bad-Content-Length':
                metadata['Content-Length'] = 'zero'
                diskfile.write_metadata(writer._fd_meta, metadata)
            elif invalid_type == 'Missing-Content-Length':
                del metadata['Content-Length']
                diskfile.write_metadata(writer._fd_meta, metadata)
            elif invalid_type == 'Bad-X-Delete-At':
                metadata['X-Delete-At'] = 'bad integer'
                diskfile.write_metadata(writer._fd_meta, metadata)

        if mark_deleted:
            df.delete(timestamp)

        data_files = [
            os.path.join(df._datadir, fname)
            for fname in sorted(os.listdir(df._datadir), reverse=True)
            if fname.endswith('.data')
        ]
        meta_files = [
            os.path.join(df._metadir, fname)
            for fname in sorted(os.listdir(df._metadir), reverse=True)
            if fname.endswith('.meta')
        ]
        # The newest (first) metadata file is the one corrupted below; keeping
        # a single path here lets the later open()/os.open() calls work.
        meta_file = meta_files[0]
        ''' if invalid_type == 'Corrupt-Xattrs':
            # We have to go below read_metadata/write_metadata to get proper
            # corruption.
            meta_xattr = open(meta_file,'rb').read()
            wrong_byte = 'X' if meta_xattr[0] != 'X' else 'Y'
            xattr.setxattr(data_files[0], "user.osd.metadata",
                           wrong_byte + meta_xattr[1:])
        elif invalid_type == 'Truncated-Xattrs':
            meta_xattr = xattr.getxattr(data_files[0], "user.osd.metadata")
            xattr.setxattr(data_files[0], "user.osd.metadata",
                           meta_xattr[:-1])
        '''
        if invalid_type == 'Missing-Name':
            with open(meta_file, 'r') as fd:
                md = diskfile.read_metadata(fd)
                del md['name']
            fd = os.open(meta_file, os.O_WRONLY | os.O_TRUNC)
            diskfile.write_metadata(fd, md)
        elif invalid_type == 'Bad-Name':
            with open(meta_file, 'r') as fd:
                md = diskfile.read_metadata(fd)
                md['name'] = md['name'] + 'garbage'
            fd = os.open(meta_file, os.O_WRONLY | os.O_TRUNC)
            diskfile.write_metadata(fd, md)

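        # Rebuild the DiskFileManager with the requested chunk size and
        # mount_check, then reopen the file through it.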
        self.conf['disk_chunk_size'] = csize
        self.conf['mount_check'] = mount_check
        self.df_mgr = diskfile.DiskFileManager(self.conf, FakeLogger())
        df = self._simple_get_diskfile(obj=obj_name)
        df.open()
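        # Truncate the data file after opening to simulate a zero-byte object.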
        if invalid_type == 'Zero-Byte':
            fp = open(df._data_file, 'w')
            fp.close()
        df.unit_test_len = fsize
        return df
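A test built on this helper typically requests a file with a specific corruption and asserts on the result; a minimal sketch, assuming (as in upstream Swift) that opening a file whose stored Content-Length disagrees with its actual size raises an exception:

    def test_open_with_bad_content_length(self):
        # Hypothetical test name; the exact exception type raised by this
        # code base is not shown above, so only a generic failure is asserted.
        self.assertRaises(Exception, self._get_open_disk_file,
                          invalid_type='Content-Length')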