def test_mem_mem(self):
    """Random read/write exercise with all stores held in memory."""
    self.container = InMemoryEntityContainer(self.cdef)
    self.bs = blockstore.EDMBlockStore(
        entity_set=self.cdef['Blocks'],
        max_block_size=self.block_size)
    self.ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    self.ss = blockstore.StreamStore(
        bs=self.bs, ls=self.ls, entity_set=self.cdef['Streams'])
    # drive the shared random read/write workload against this setup
    self.random_rw()
def test_context(self):
    """Locks acquired via the context manager must release on exit."""
    # every lock/unlock pair needs a context object
    lock_store = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    key = hashlib.sha256(b'Lockme').hexdigest()
    with lock_store.lock(key):
        # do something
        pass
    # the with-block above should have unlocked the key, so a fresh
    # acquisition within the timeout must succeed
    try:
        lock_store.lock(key, timeout=2)
    except blockstore.LockError:
        self.fail("Context manager failed to unlock")
def test_sql_sql(self):
    """Random read/write exercise backed by a SQLite database file."""
    db_path = str(self.d.join('blockstore.db'))
    self.container = BlockStoreContainer(
        container=self.cdef, file_path=db_path)
    self.container.create_all_tables()
    self.bs = blockstore.EDMBlockStore(
        entity_set=self.cdef['Blocks'],
        max_block_size=self.block_size)
    self.ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    self.ss = blockstore.StreamStore(
        bs=self.bs, ls=self.ls, entity_set=self.cdef['Streams'])
    # same workload as the in-memory case, different backing store
    self.random_rw()
def setUp(self):  # noqa
    """Load the blockstore schema and build the in-memory fixtures."""
    self.doc = edmx.Document()
    with open(os.path.join(DATA_DIR, 'blockstore.xml'), 'rb') as f:
        self.doc.read(f)
    self.cdef = self.doc.root.DataServices['BlockSchema.BlockContainer']
    self.container = InMemoryEntityContainer(self.cdef)
    # shared state for the multithreaded lock tests
    self.mt_lock = threading.Lock()
    self.mt_count = 0
    self.bs = blockstore.EDMBlockStore(
        entity_set=self.cdef['Blocks'], max_block_size=64)
    self.ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
def test_lock2(self): # now turn the timeouts around, short locks, long waits ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'], lock_timeout=1) hash_key = hashlib.sha256(b'Lockme').hexdigest() ls.lock(hash_key) # now we should wait long enough to grab the lock again try: ls.lock(hash_key, timeout=5) except blockstore.LockError: self.fail("Expected timeout on lock") ls.unlock(hash_key)
def __init__(self, db, dpath=None, **kwargs):
    """Create the stream store over a MySQL container.

    db
        The database connection settings passed through to
        :py:class:`MySQLEntityContainer`.

    dpath
        Optional directory path; when given, blocks are stored on the
        file system, otherwise they live in the entity container.
    """
    self.container_def = self.load_container()
    #: the :py:class:`MySQLEntityContainer` used for the blockstore
    self.container = MySQLEntityContainer(
        db=db, container=self.container_def, **kwargs)
    # BUG FIX: the original condition was inverted ("if dpath is None")
    # and so constructed FileBlockStore(None); a file-backed block store
    # only makes sense when a directory path was actually supplied.
    if dpath is not None:
        bs = blockstore.FileBlockStore(dpath)
    else:
        bs = blockstore.EDMBlockStore(
            entity_set=self.container_def['Blocks'])
    ls = blockstore.LockStore(entity_set=self.container_def['Locks'])
    blockstore.StreamStore.__init__(
        self, bs, ls, self.container_def['Streams'])
def test_lock_multithread(self):
    """Fifty competing threads; more than one must obtain the lock."""
    ls = blockstore.LockStore(
        entity_set=self.cdef['BlockLocks'], lock_timeout=3)
    workers = [
        threading.Thread(target=self.lock_runner, args=(ls, ))
        for _ in range3(50)]
    for worker in workers:
        worker.start()
        # occasionally pause so the threads start in an uneven pattern
        time.sleep(1 if random.random() < 0.1 else 0)
    while workers:
        workers.pop().join()
    self.assertTrue(self.mt_count > 1)
    logging.info("%i out of %i threads obtained the lock",
                 self.mt_count, 50)
def test_lock(self):
    """Basic acquire / timeout / release semantics of LockStore."""
    ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    key_a = hashlib.sha256(b'Lockme').hexdigest()
    key_b = hashlib.sha256(b'andme').hexdigest()
    # locks are keyed
    ls.lock(key_b)
    # we can grab a lock, but now try again and it should fail
    ls.lock(key_a)
    try:
        ls.lock(key_a, timeout=1)
        self.fail("Expected timeout on acquire")
    except blockstore.LockError:
        pass
    ls.unlock(key_a)
    # unlocked it should work just fine
    ls.lock(key_a, timeout=1)
    ls.unlock(key_a)
    ls.unlock(key_b)
    # unlocking is benign - repeat and rinse
    ls.unlock(key_a)
    ls.unlock(key_b)
def test_init(self):
    """A LockStore created without options has the documented default."""
    ls = blockstore.LockStore(entity_set=self.cdef['BlockLocks'])
    # assertEqual reports both values on failure, unlike the original
    # assertTrue(x == 180) which only said the message
    self.assertEqual(ls.lock_timeout, 180, "default lock timeout")