Example #1
    def register_user(self):
        self.fabnet_gateway.init_socket_processor()
        self.journal.init()
        if self.metadata:
            # reopen the metadata file from the static cache against the freshly initialized journal
            self.metadata.close()
            self.metadata = MetadataFile(
                self.db_cache.get_static_cache_path(self.metadata_f_name),
                self.journal)
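Example #1 reopens the metadata file from the static cache after the gateway socket processor and the journal have been reinitialized. Below is a minimal sketch of that reopen pattern as a standalone helper; the name reopen_metadata and its signature are illustrative assumptions, while MetadataFile, close() and db_cache.get_static_cache_path() are exactly the calls already shown above.

# Hypothetical helper (not project API): reopen a MetadataFile from the static
# cache, closing any stale handle first, mirroring register_user() above.
def reopen_metadata(db_cache, journal, metadata_f_name, current_md=None):
    if current_md:
        current_md.close()
    return MetadataFile(db_cache.get_static_cache_path(metadata_f_name), journal)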
Example #2
    def start(self):
        # start the gateway's socket processor and require an existing journal for this user's key
        self.fabnet_gateway.init_socket_processor()
        if not self.journal.foreign_exists():
            raise NoJournalFoundException('No journal for key = %s' %
                                          self.metadata_key)

        # open the metadata file from the static cache and bind a transactions manager to it
        self.metadata = MetadataFile(
            self.db_cache.get_static_cache_path(self.metadata_f_name),
            self.journal)
        self.transactions_manager = TransactionsManager(
            self.metadata, self.db_cache, user_id=self.metadata_key)

        SmartFileObject.setup_transaction_manager(self.transactions_manager)

        # worker pools for uploads, downloads and deletes
        self.put_manager = WorkersManager(PutWorker, self.fabnet_gateway,
                                          self.transactions_manager, self.__parallel_put_count)
        self.get_manager = WorkersManager(GetWorker, self.fabnet_gateway,
                                          self.transactions_manager, self.__parallel_get_count)
        self.delete_manager = WorkersManager(DeleteWorker, self.fabnet_gateway,
                                             self.transactions_manager, 2)

        self.put_manager.start()
        self.get_manager.start()
        self.delete_manager.start()

    def test00_base(self):
        ks = FileBasedSecurityManager(CLIENT_KS_PATH, PASSWD)
        DataBlock.SECURITY_MANAGER = ks
        DataBlock.LOCK_MANAGER = DBLocksManager()
        remove_dir(tmp('smart_file_test'))
        os.makedirs(tmp('smart_file_test/dynamic_cache'))
        os.makedirs(tmp('smart_file_test/static_cache'))

        db_cache = DataBlockCache(tmp('smart_file_test'), user_id=sha1('test').hexdigest())
        md = MetadataFile(db_cache.get_static_cache_path('test_md.bin'))
        tr_manager = None
        try:
            tr_manager = TransactionsManager(md, db_cache)
            p_queue = tr_manager.get_upload_queue()
            g_queue = tr_manager.get_download_queue()
            SmartFileObject.setup_transaction_manager(tr_manager)

            e_file = SmartFileObject('/empty_file', for_write=True)
            e_file.close()

            e_file = SmartFileObject('/empty_file')
            data = e_file.read()
            self.assertEqual(data, '')
            e_file.close()

            test_file = SmartFileObject('/test.file', for_write=True)
            test_file.write('this is test message for one data block!')
            test_file.close()
            put_obj = p_queue.get(False)
            transaction, seek = put_obj
            self.assertEqual(seek, 0)
            data_block, next_seek, _ = transaction.get_data_block(seek, noclone=True)
            self.assertNotEqual(data_block, None)
            self.assertEqual(next_seek, None)
            data_block.close()

            tr_manager.update_transaction(transaction.get_id(), seek, is_failed=False, foreign_name='%040x'%123456)
            self.assertEqual(transaction.get_status(), Transaction.TS_FINISHED)

            db_path = '%s.%s'%(transaction.get_id(), seek)
            self.assertFalse(DataBlock.is_locked(db_cache.get_cache_path(db_path)))

            test_file = SmartFileObject('/test.file')
            data = test_file.read(4)
            self.assertEqual(data, 'this')
            self.assertTrue(DataBlock.is_locked(db_cache.get_cache_path(db_path)))
            data = test_file.read()
            self.assertEqual(data, ' is test message for one data block!')
            test_file.close()
            self.assertFalse(DataBlock.is_locked(db_cache.get_cache_path(db_path)))
            with self.assertRaises(Empty):
                get_obj = g_queue.get(False)

            mgt = MockedGetThread(g_queue)
            mgt.start()

            open(db_cache.get_cache_path(db_path), 'w').write('invalid data')  # corrupt the cached block: the read below should detect it and refetch
            test_file = SmartFileObject('/test.file')
            data = test_file.read()
            self.assertEqual(data, 'this is test message for one data block!')
            test_file.close()

            db_cache.clear_all()

            test_file = SmartFileObject('/test.file')
            data = test_file.read()
            self.assertEqual(data, 'this is test message for one data block!')
            test_file.close()

            self.assertFalse(DataBlock.is_locked(db_cache.get_cache_path('%040x'%123456)))
        finally:
            if tr_manager:
                g_queue.put((None,None))
                tr_manager.close()
            md.close()
            db_cache.stop()
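    # Editorial note, not in the original source: test00_base above covers the full
    # SmartFileObject round trip. A close() after writing queues a put transaction,
    # update_transaction() with a foreign name marks it TS_FINISHED, a read keeps the
    # cached data block locked until close(), and clearing the cache forces the read
    # back through the download queue serviced by MockedGetThread.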

    def test01_trans_manager(self):
        db_cache = DataBlockCache(tmp('smart_file_test'), user_id=sha1('test').hexdigest())
        open(db_cache.get_static_cache_path('transactions-share.log'), 'w').close()
        try:
            db_cache.clear_all()

            md = MetadataFile(db_cache.get_static_cache_path('test_md.bin'))
            tr_manager = TransactionsManager(md, db_cache, 2)

            transaction_id = tr_manager.start_upload_transaction('/not_cached_test.file')
            tr_manager.transfer_data_block(transaction_id, 0, 3500, \
                    DataBlock(db_cache.get_cache_path('fake_for_delete'), force_create=True))
            self.assertTrue(os.path.exists(db_cache.get_cache_path('fake_for_delete')))
            fake_for_delete_name = '%s.%s'%(transaction_id, 0)

            transaction_id = tr_manager.start_upload_transaction('/my_second_test.file')
            tr_manager.transfer_data_block(transaction_id, 0, 1000, DataBlock(db_cache.get_cache_path('fake')))
            tr_manager.transfer_data_block(transaction_id, 1000, 2000, DataBlock(db_cache.get_cache_path('fake1')))
            tr_manager.update_transaction_state(transaction_id, Transaction.TS_LOCAL_SAVED)
            tr_manager.transfer_data_block(transaction_id, 0, 1000, DataBlock(db_cache.get_cache_path('fake2')), '%040x'%123456)

            transaction = tr_manager.start_download_transaction('/test.file')
            db,_,_ = transaction.get_data_block(0)
            read_block_name = '%s.%s'%(transaction.get_id(), 0)
            self.assertTrue(os.path.exists(db_cache.get_cache_path(read_block_name)))
            db.close()

            md.close()
            tr_manager.close()
            self.assertFalse(DataBlock.is_locked(db_cache.get_cache_path(read_block_name)))

            md = MetadataFile(db_cache.get_static_cache_path('test_md.bin'))
            tr_manager = TransactionsManager(md, db_cache, 2)

            up_queue = tr_manager.get_upload_queue()
            self.assertEqual(up_queue.qsize(), 1)

            self.assertFalse(os.path.exists(db_cache.get_cache_path(fake_for_delete_name)))
            self.assertFalse(os.path.exists(db_cache.get_cache_path(read_block_name)), read_block_name)
            md.close()
            tr_manager.close()

            open(db_cache.get_static_cache_path('transactions-share.log'), 'w').close()

            md = MetadataFile(db_cache.get_static_cache_path('test_md.bin'))
            tr_manager = TransactionsManager(md, db_cache, 5)
            transactions = []
            for i in xrange(7):
                transaction_id = tr_manager.start_upload_transaction('/%s_test.file'%i)
                transactions.append(transaction_id)

            cnt = 0
            for i, (is_up, path, stat, size, progress) in enumerate(tr_manager.iterate_transactions()):
                self.assertEqual(path, '/%s_test.file'%i)
                cnt += 1
            self.assertEqual(cnt, 7)

            for tr_id in transactions:
                tr_manager.update_transaction_state(tr_id, Transaction.TS_FAILED)

            cnt = 0
            tr_manager.start_upload_transaction('/7_test.file')
            for i, (is_up, path, stat, size, progress) in enumerate(tr_manager.iterate_transactions()):
                self.assertEqual(path, '/%s_test.file'%(i+3))
                cnt += 1
            self.assertEqual(cnt, 5)

            for i in xrange(5):
                tr_manager.start_upload_transaction('/%s_2_test.file'%i)
        finally:
            db_cache.stop()
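Condensed from test00_base, the sketch below shows the basic SmartFileObject write/read round trip once a TransactionsManager has been registered. It assumes the same setup as that test (a DataBlockCache, a MetadataFile and a TransactionsManager named tr_manager already exist); the virtual path '/notes.txt' and the payload are placeholders, and only calls that appear in the examples above are used.

# Minimal round trip, assuming tr_manager was built as in test00_base.
SmartFileObject.setup_transaction_manager(tr_manager)

out_f = SmartFileObject('/notes.txt', for_write=True)  # '/notes.txt' is an arbitrary virtual path
out_f.write('some payload')
out_f.close()  # close() hands the written data block to the upload queue

in_f = SmartFileObject('/notes.txt')
assert in_f.read() == 'some payload'
in_f.close()  # releases the lock on the cached data block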