import os
import time
import string
import random
import hashlib
import threading


class DBWriter(threading.Thread):
    """Writes random data into a DataBlock in several chunks,
    accumulating a SHA-1 checksum of everything written."""

    def __init__(self, path, f_len):
        threading.Thread.__init__(self)
        self.path = path
        if os.path.exists(self.path):
            os.remove(self.path)
        open(self.path, 'w').close()
        self.f_len = f_len
        print 'simulating data block with size = %s ...' % f_len
        self.db = None
        self.checksum = hashlib.sha1()

    def run(self):
        self.db = DataBlock(self.path)  # self.f_len)
        f_len = self.f_len
        parts = random.randint(1, 11)

        # write the block in `parts` equal-sized chunks
        for i in xrange(parts):
            data = ''.join(random.choice(string.letters)
                           for i in xrange(f_len / parts))
            self.checksum.update(data)
            self.db.write(data)
            time.sleep(.1)

        # write whatever remains after the integer division
        rest = f_len - (f_len / parts) * parts
        if rest:
            data = ''.join(random.choice(string.letters)
                           for i in xrange(rest))
            self.checksum.update(data)
            self.db.write(data)

        self.db.finalize()
        self.db.close()

    def get_checksum(self):
        return self.checksum.hexdigest()
class DBReader(threading.Thread):
    """Reads a DataBlock back in 1000-byte chunks, hashing the
    returned data with SHA-1, and removes the file afterwards."""

    def __init__(self, path, f_len):
        threading.Thread.__init__(self)
        self.path = path
        self.f_len = f_len
        self.checksum = hashlib.sha1()

    def run(self):
        try:
            self.db = DataBlock(self.path, self.f_len)
            while True:
                data = self.db.read(1000)
                if not data:
                    break
                self.checksum.update(data)
            self.db.close()
            os.remove(self.path)
        except Exception, err:
            print 'ERROR: %s' % err
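# A minimal round-trip sketch (not part of the original tests): it assumes the
# DataBlock semantics shown above -- the writer finalizes the block, the reader
# opens it with the expected plain-data length -- and compares the two SHA-1
# digests. The helper name and the default size are arbitrary illustrations.
def _writer_reader_roundtrip(path, f_len=10240):
    writer = DBWriter(path, f_len)
    writer.start()
    writer.join()

    reader = DBReader(path, f_len)
    reader.start()
    reader.join()

    # both threads hashed the plain data, so the digests should match
    assert writer.get_checksum() == reader.checksum.hexdigest()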
def test_data_block(self):
    ks = FileBasedSecurityManager(CLIENT_KS_1024_PATH, PASSWD)
    DataBlock.SECURITY_MANAGER = ks

    DB_PATH = tmp('test_data_block.kst')
    DATA_LEN = 10
    if os.path.exists(DB_PATH):
        os.remove(DB_PATH)

    # write DATA_LEN bytes of random data and remember its checksum
    db = DataBlock(DB_PATH, DATA_LEN, force_create=True)
    checksum = hashlib.sha1()
    for i in xrange(DATA_LEN / 10):
        data = ''.join(random.choice(string.letters)
                       for i in xrange(DATA_LEN / (DATA_LEN / 10)))
        checksum.update(data)
        db.write(data)
    db.close()
    db.close()  # should be OK
    or_checksum = checksum.hexdigest()
    enc_checksum = db.checksum()

    # read the block back in chunks and verify both checksums
    db = DataBlock(DB_PATH, DATA_LEN)
    ret_data = ''
    checksum = hashlib.sha1()
    while True:
        data = db.read(100)
        if not data:
            break
        ret_data += data
        checksum.update(data)
    self.assertEqual(or_checksum, checksum.hexdigest())
    self.assertEqual(db.checksum(), enc_checksum)

    # raw read should leave the stored checksum unchanged
    db = DataBlock(DB_PATH, DATA_LEN)
    raw = db.read_raw()
    self.assertEqual(db.checksum(), enc_checksum)

    # a full read() should return the same data as the chunked read
    db = DataBlock(DB_PATH, DATA_LEN)
    raw = db.read()
    self.assertEqual(ret_data, raw)

    # append to the existing block, then read it back by actual size
    app_db = DataBlock(DB_PATH)
    app_db.write('The end!')
    app_db.finalize()
    app_db.close()

    db = DataBlock(DB_PATH, actsize=True)
    raw = db.read()
    self.assertEqual(ret_data + 'The end!', raw)
    db.close()
import struct


class Journal:
    # journal statuses
    JS_SYNC = 0
    JS_NOT_SYNC = 1
    JS_NOT_INIT = 2
    JS_SYNC_FAILED = 3

    # types of operations for journaling
    OT_APPEND = 1
    OT_UPDATE = 2
    OT_REMOVE = 3

    RECORD_STRUCT = '<IBQ'
    RECORD_STRUCT_SIZE = struct.calcsize(RECORD_STRUCT)

    def __init__(self, journal_key, journal_path, fabnet_gateway):
        self.__journal_key = journal_key
        self.__journal_path = journal_path
        self.__journal = DataBlock(self.__journal_path, force_create=True)
        self.__fabnet_gateway = fabnet_gateway
        self.__last_record_id = 0
        self.__no_foreign = True
        self.__is_sync = False
        self.__sync_failed = False

        self.__j_sync_thrd = JournalSyncThread(self)
        self.__j_sync_thrd.start()

    def get_journal_key(self):
        return self.__journal_key

    def __recv_journal(self):
        self.__journal.remove()
        self.__journal = DataBlock(self.__journal_path, force_create=True)
        is_recv = self.__fabnet_gateway.get(self.__journal_key, 2, self.__journal)
        if is_recv:
            self.__no_foreign = False
            self.__is_sync = True
            self.__journal.close()  # the next __journal.write reopens the data block
            events_provider.info('journal', 'Journal is received from NimbusFS backend')
        else:
            events_provider.warning('journal', "Can't receive journal from NimbusFS backend")
            self.__no_foreign = True

    def close(self):
        self.__journal.close()
        self.__j_sync_thrd.stop()

    @JLock
    def synchronized(self):
        return self.__is_sync

    @JLock
    def status(self):
        if self.__sync_failed:
            return self.JS_SYNC_FAILED
        if self.__no_foreign:
            return self.JS_NOT_INIT
        if not self.__is_sync:
            return self.JS_NOT_SYNC
        return self.JS_SYNC

    @JLock
    def _synchronize(self):
        try:
            logger.debug('synchronizing journal...')
            self.__journal.flush()
            j_data = DataBlock(self.__journal_path, actsize=True)
            is_send = self.__fabnet_gateway.put(j_data, key=self.__journal_key)
            if is_send:
                self.__is_sync = True
                self.__sync_failed = False
        except Exception, err:
            self.__sync_failed = True
            raise err
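# A hedged illustration (not from the original module) of how a record header
# laid out as RECORD_STRUCT = '<IBQ' can be packed and unpacked with the struct
# module: little-endian, a 4-byte unsigned int, a 1-byte unsigned char and an
# 8-byte unsigned long long (13 bytes total). Only the format string comes from
# the Journal class above; the field meanings and helper names are assumptions.
def _pack_record_header(record_id, operation_type, item_id):
    # e.g. _pack_record_header(1, Journal.OT_APPEND, 42)
    return struct.pack(Journal.RECORD_STRUCT, record_id, operation_type, item_id)


def _unpack_record_header(raw):
    # raw must contain at least RECORD_STRUCT_SIZE bytes
    return struct.unpack(Journal.RECORD_STRUCT, raw[:Journal.RECORD_STRUCT_SIZE])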