def copy_and_initialize_upgrade_database(self, db_name):
    """Install the named fixture database (plus a dispersy db) into the
    session state dir and open it as self.sqlitedb / session.sqlite_db."""
    # create a file to be removed in the thumbnails
    os.mkdir(self.session.config.get_torrent_collecting_dir())
    os.mkdir(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1'))
    self.write_data_to_file(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1', 'file1.txt'))
    # NOTE(review): the directory is created under session_base_dir but the
    # copies below go to config.get_state_dir()/sqlite -- presumably these
    # resolve to the same path in the test harness; confirm.
    os.mkdir(os.path.join(self.session_base_dir, 'sqlite'))
    shutil.copyfile(os.path.join(self.DATABASES_DIR, db_name),
                    os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb'))
    shutil.copyfile(os.path.join(self.DATABASES_DIR, 'torrent_upgrade_64_dispersy.db'),
                    os.path.join(self.session.config.get_state_dir(), 'sqlite', 'dispersy.db'))
    db_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb')
    # Open the copied database and make it the session's database.
    self.sqlitedb = SQLiteCacheDB(db_path)
    self.sqlitedb.initialize()
    self.sqlitedb.initial_begin()
    self.session.sqlite_db = self.sqlitedb
class AbstractDB(AbstractServer):
    """Base test case that boots a dummy Session (all subsystems disabled)
    backed by a restored baked Tribler database."""

    def setUp(self):
        super(AbstractDB, self).setUp()

        # dummy session: every optional subsystem is switched off so only
        # the database layer is exercised by the tests.
        self.config = SessionStartupConfig()
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_checking(False)
        self.config.set_multicast_local_peer_discovery(False)
        self.config.set_megacache(False)
        self.config.set_dispersy(False)
        self.config.set_mainline_dht(False)
        self.config.set_torrent_collecting(False)
        self.config.set_libtorrent(False)
        self.config.set_dht_torrent_collecting(False)
        self.config.set_videoplayer(False)
        self.config.set_torrent_store(False)
        self.session = Session(self.config, ignore_singleton=True)

        # Restore the baked fixture database into the state dir and open it.
        dbpath = init_bak_tribler_sdb('bak_new_tribler.sdb', destination_path=self.getStateDir(), overwrite=True)
        self.sqlitedb = SQLiteCacheDB(self.session, busytimeout=BUSYTIMEOUT)
        self.sqlitedb.initialize(dbpath)
        self.session.sqlite_db = self.sqlitedb

    @blocking_call_on_reactor_thread
    def tearDown(self):
        self.sqlitedb.close()
        self.sqlitedb = None
        self.session.del_instance()
        self.session = None
        # FIX: the original called super(...).tearDown(self), which passed the
        # instance a second time as a stray positional argument to the parent
        # tearDown.
        super(AbstractDB, self).tearDown()
def test_no_permission_on_script(self):
    """Initialize with an unreadable SQL script (all permission bits cleared)."""
    # NOTE(review): os.chmod(path, 0) has no effect on Windows and is
    # bypassed when running as root -- presumably this test runs as an
    # unprivileged POSIX user; confirm.
    db_path = os.path.join(self.session_base_dir, "test_db.db")
    new_script_path = os.path.join(self.session_base_dir, "script.sql")
    shutil.copyfile(self.tribler_db_script, new_script_path)
    os.chmod(new_script_path, 0)
    sqlite_test_2 = SQLiteCacheDB(db_path, new_script_path)
    sqlite_test_2.initialize()
class AbstractUpgrader(TriblerCoreTest):
    """Base test case for database-upgrade tests: provides a Session, the
    fixture-database directory, and a helper that installs an upgradeable
    database into the session state dir."""

    FILE_DIR = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
    # Directory holding the pre-baked databases used as upgrade inputs.
    DATABASES_DIR = os.path.abspath(os.path.join(FILE_DIR, u"../data/upgrade_databases/"))

    def write_data_to_file(self, file_name):
        """Write a small dummy payload to file_name."""
        # FIX: renamed the handle (the original shadowed the builtin `file`)
        # and dropped the redundant close() -- the with-statement already
        # closes the handle on exit.
        with open(file_name, 'w') as output_file:
            output_file.write("lorem ipsum")

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self):
        yield super(AbstractUpgrader, self).setUp()
        self.config = TriblerConfig(ConfigObj(configspec=CONFIG_SPEC_PATH))
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_collecting_dir(os.path.join(self.session_base_dir, 'torrent_collecting_dir'))
        self.session = Session(self.config)
        self.sqlitedb = None
        self.torrent_store = None

    def tearDown(self):
        if self.torrent_store:
            self.torrent_store.close()

        super(AbstractUpgrader, self).tearDown()

        if self.sqlitedb:
            self.sqlitedb.close()
        self.sqlitedb = None
        self.session = None

    def copy_and_initialize_upgrade_database(self, db_name):
        """Install the named fixture database (plus a dispersy db) into the
        session state dir and open it as self.sqlitedb / session.sqlite_db."""
        # create a file to be removed in the thumbnails
        os.mkdir(self.session.config.get_torrent_collecting_dir())
        os.mkdir(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1'))
        self.write_data_to_file(os.path.join(self.session.config.get_torrent_collecting_dir(), 'dir1', 'file1.txt'))
        os.mkdir(os.path.join(self.session_base_dir, 'sqlite'))
        shutil.copyfile(os.path.join(self.DATABASES_DIR, db_name),
                        os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb'))
        shutil.copyfile(os.path.join(self.DATABASES_DIR, 'torrent_upgrade_64_dispersy.db'),
                        os.path.join(self.session.config.get_state_dir(), 'sqlite', 'dispersy.db'))
        db_path = os.path.join(self.session.config.get_state_dir(), 'sqlite', 'tribler.sdb')
        self.sqlitedb = SQLiteCacheDB(db_path)
        self.sqlitedb.initialize()
        self.sqlitedb.initial_begin()
        self.session.sqlite_db = self.sqlitedb
def setUp(self):
    # Generator-style setUp: presumably decorated with @inlineCallbacks in
    # the original file -- the bare yield drives the parent setUp deferred.
    yield super(TestSqliteCacheDB, self).setUp()

    db_path = u":memory:"  # in-memory database; nothing touches disk
    self.sqlite_test = SQLiteCacheDB(db_path)
    self.sqlite_test.set_show_sql(True)  # echo SQL statements for debugging
def test_open_db_script_file(self):
    """Round-trip a version number through write_version()/version."""
    # NOTE(review): unlike the fuller variant of this test, write_version()
    # is called without initialize()/initial_begin() first -- presumably the
    # constructor already opens the database in this API version; confirm.
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), DB_SCRIPT_ABSOLUTE_PATH)
    sqlite_test_2.write_version(4)
    self.assertEqual(sqlite_test_2.version, 4)
class AbstractDB(AbstractServer):
    """Base test case that boots a dummy Session (all subsystems disabled)
    backed by a restored baked Tribler database."""

    def setUp(self):
        super(AbstractDB, self).setUp()

        # dummy session: every optional subsystem is switched off so only
        # the database layer is exercised by the tests.
        self.config = SessionStartupConfig()
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_checking(False)
        self.config.set_multicast_local_peer_discovery(False)
        self.config.set_megacache(False)
        self.config.set_dispersy(False)
        self.config.set_mainline_dht(False)
        self.config.set_torrent_collecting(False)
        self.config.set_libtorrent(False)
        self.config.set_dht_torrent_collecting(False)
        self.config.set_videoplayer(False)
        self.config.set_torrent_store(False)
        self.session = Session(self.config, ignore_singleton=True)

        # Restore the baked fixture database into the state dir and open it.
        dbpath = init_bak_tribler_sdb('bak_new_tribler.sdb', destination_path=self.getStateDir(), overwrite=True)
        self.sqlitedb = SQLiteCacheDB(self.session, busytimeout=BUSYTIMEOUT)
        self.sqlitedb.initialize(dbpath)
        self.session.sqlite_db = self.sqlitedb

    @blocking_call_on_reactor_thread
    def tearDown(self):
        self.sqlitedb.close()
        self.sqlitedb = None
        self.session.del_instance()
        self.session = None
        # FIX: the original called super(...).tearDown(self), which passed the
        # instance a second time as a stray positional argument to the parent
        # tearDown.
        super(AbstractDB, self).tearDown()
class TestContentRepositoryWithRealDatabase(TestBase):
    """
    Tests content repository with real database.
    """

    def setUp(self):
        super(TestContentRepositoryWithRealDatabase, self).setUp()

        # Unpack the baked Tribler database fixture into a scratch directory.
        session_base_dir = self.temporary_directory()
        tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz')
        tar.extractall(session_base_dir)
        db_path = os.path.join(session_base_dir, 'bak_new_tribler.sdb')
        self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT)

        # Minimal fake session: only the attributes TorrentDBHandler touches.
        session = MockObject()
        session.sqlite_db = self.sqlitedb
        session.notifier = MockObject()
        self.torrent_db = TorrentDBHandler(session)

        channel_db = MockObject()
        self.content_repository = ContentRepository(self.torrent_db, channel_db)

    def tearDown(self):
        # Close handler before the underlying database.
        self.torrent_db.close()
        self.sqlitedb.close()
        super(TestContentRepositoryWithRealDatabase, self).tearDown()

    def test_update_db_from_search_results(self):
        """
        Test if database is properly updated with the search results.
        Should not raise any UnicodeDecodeError.
        """
        # Add a torrent infohash before updating from search results
        infohash = unhexlify('ed81da94d21ad1b305133f2726cdaec5a57fed98')
        self.content_repository.torrent_db.addOrGetTorrentID(infohash)

        # Sample search results; the name contains non-ASCII bytes to
        # exercise the unicode handling path.
        name = 'Puppy.Linux.manual.301.espa\xc3\xb1ol.pdf'
        length = random.randint(1000, 9999)
        num_files = random.randint(1, 10)
        category_list = ['other']
        creation_date = random.randint(1000000, 111111111)
        seeders = random.randint(10, 200)
        leechers = random.randint(5, 1000)
        cid = None
        search_results = [[infohash, name, length, num_files, category_list, creation_date,
                           seeders, leechers, cid]]

        # Update from search results
        self.content_repository.update_from_torrent_search_results(search_results)

        # Check if database has correct results
        torrent_info = self.content_repository.get_torrent(infohash)
        expected_name = u'Puppy.Linux.manual.301.espa\xc3\xb1ol.pdf'
        self.assertEqual(expected_name, torrent_info['name'])
        self.assertEqual(seeders, torrent_info['num_seeders'])
        self.assertEqual(leechers, torrent_info['num_leechers'])
        self.assertEqual(creation_date, torrent_info['creation_date'])
        self.assertEqual(num_files, torrent_info['num_files'])
        self.assertEqual(length, torrent_info['length'])
def start_database(self):
    """
    Start the SQLite database.
    """
    db_path = os.path.join(self.config.get_state_dir(), DB_FILE_RELATIVE_PATH)

    # Open the database, run schema setup, then start the long-lived
    # transaction that the cache layer commits periodically.
    self.sqlite_db = SQLiteCacheDB(db_path)
    self.sqlite_db.initialize()
    self.sqlite_db.initial_begin()
def start_database(self):
    """
    Start the SQLite database.
    """
    db_path = os.path.join(self.get_state_dir(), DB_FILE_RELATIVE_PATH)
    # Schema script ships inside the Tribler package.
    db_script_path = os.path.join(get_lib_path(), DB_SCRIPT_NAME)

    # Open the database, run schema setup, then start the long-lived
    # transaction that the cache layer commits periodically.
    self.sqlite_db = SQLiteCacheDB(db_path, db_script_path)
    self.sqlite_db.initialize()
    self.sqlite_db.initial_begin()
def tearDown(self):
    # Tear down the database singleton if the test created one.
    if SQLiteCacheDB.hasInstance():
        SQLiteCacheDB.getInstance().close_all()
        SQLiteCacheDB.delInstance()
    if Session.has_instance():  # Upgrading will create a session instance
        Session.del_instance()

    # Restore the module-level pacing constants patched in setUp.
    sqlitecachedb.INITIAL_UPGRADE_PAUSE, sqlitecachedb.SUCCESIVE_UPGRADE_PAUSE, \
        sqlitecachedb.UPGRADE_BATCH_SIZE, sqlitecachedb.TEST_OVERRIDE = self.original_values

    self.tearDownCleanup()
def tearDown(self):
    # Tear down the database singleton if the test created one.
    if SQLiteCacheDB.hasInstance():
        SQLiteCacheDB.getInstance().close_all()
        SQLiteCacheDB.delInstance()
    if Session.has_instance():  # Upgrading will create a session instance
        Session.del_instance()

    # Restore the module-level pacing constants patched in setUp.
    sqlitecachedb.INITIAL_UPGRADE_PAUSE, sqlitecachedb.SUCCESIVE_UPGRADE_PAUSE, \
        sqlitecachedb.UPGRADE_BATCH_SIZE, sqlitecachedb.TEST_OVERRIDE = self.original_values

    self.tearDownCleanup()
def test_integrity_check_failed(self):
    """Stub execute() so PRAGMA quick_check reports a corrupted image while
    the database is being initialized."""
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"),
                                  os.path.join(self.SQLITE_SCRIPTS_DIR, "script1.sql"))

    def execute(sql):
        # Only the integrity-check query gets a fake malformed-image answer;
        # every other statement silently returns None.
        if sql == u"PRAGMA quick_check":
            db_response = MockObject()
            db_response.next = lambda: ("Error: database disk image is malformed", )
            return db_response

    sqlite_test_2.execute = execute
    # FIX: without this call the stubbed execute() was never invoked, so the
    # test exercised nothing (the complete sibling variant of this test ends
    # with initialize()).
    sqlite_test_2.initialize()
def prestart(self):
    """
    Pre-starts the session. We check the current version and upgrade if
    needed - before we start everything else.
    """
    # Open the database first; the upgrader needs it to inspect the schema
    # version and migrate in place.
    self.sqlite_db = SQLiteCacheDB(self)
    self.sqlite_db.initialize()
    self.sqlite_db.initial_begin()
    self.upgrader = TriblerUpgrader(self, self.sqlite_db)
    self.upgrader.run()
    return self.upgrader
def setUp(self):
    # Generator-style setUp: presumably decorated with @inlineCallbacks in
    # the original file.
    yield super(TestSqliteCacheDB, self).setUp()

    db_path = u":memory:"  # in-memory database; nothing touches disk
    self.sqlite_test = SQLiteCacheDB(db_path)
    self.sqlite_test.set_show_sql(True)
    self.sqlite_test.initialize()

    # Locate the schema script shipped inside the Tribler package.
    import Tribler
    self.tribler_db_script = os.path.join(os.path.dirname(Tribler.__file__), DB_SCRIPT_NAME)
def test_integrity_check_failed(self):
    """Stub execute() so PRAGMA quick_check reports a corrupted image while
    the database is being initialized."""
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"),
                                  os.path.join(self.SQLITE_SCRIPTS_DIR, "script1.sql"))

    def execute(sql):
        # Only the integrity-check query gets a fake malformed-image answer;
        # every other statement silently returns None.
        if sql == u"PRAGMA quick_check":
            db_response = MockObject()
            db_response.next = lambda: ("Error: database disk image is malformed", )
            return db_response

    sqlite_test_2.execute = execute
    # initialize() runs the integrity check through the stubbed execute().
    sqlite_test_2.initialize()
def tearDown(self):
    # Stop the checker first, then tear down every singleton it used.
    self.torrentChecking.shutdown()
    TorrentChecking.delInstance()

    if SQLiteCacheDB.hasInstance():
        SQLiteCacheDB.getInstance().close_all()
        SQLiteCacheDB.delInstance()

    TorrentDBHandler.delInstance()
    MyPreferenceDBHandler.delInstance()
    NetworkBuzzDBHandler.delInstance()

    self.tearDownCleanup()
def tearDown(self):
    # Stop the checker first, then tear down every singleton it used.
    self.torrentChecking.shutdown()
    TorrentChecking.delInstance()

    if SQLiteCacheDB.hasInstance():
        SQLiteCacheDB.getInstance().close_all()
        SQLiteCacheDB.delInstance()

    TorrentDBHandler.delInstance()
    MyPreferenceDBHandler.delInstance()
    NetworkBuzzDBHandler.delInstance()

    self.tearDownCleanup()
def setUp(self):
    # Generator-style setUp: presumably decorated with @inlineCallbacks in
    # the original file.
    yield super(AbstractDB, self).setUp()
    self.setUpPreSession()
    self.session = Session(self.config)

    # Unpack the baked database fixture next to the session dir.
    tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz')
    tar.extractall(self.session_base_dir)
    db_path = os.path.join(self.session_base_dir, 'bak_new_tribler.sdb')

    self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT)
    self.session.sqlite_db = self.sqlitedb
def test_clean_db(self):
    """clean_db with vacuum + exiting must succeed on a fresh database."""
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), DB_SCRIPT_ABSOLUTE_PATH)
    sqlite_test_2.initialize()
    sqlite_test_2.initial_begin()
    sqlite_test_2.clean_db(vacuum=True, exiting=True)
def test_clean_db(self):
    """clean_db with vacuum + exiting must succeed on a fresh database."""
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), self.tribler_db_script)
    sqlite_test_2.initialize()
    sqlite_test_2.initial_begin()
    sqlite_test_2.clean_db(vacuum=True, exiting=True)
class TestSqliteCacheDB(AbstractServer):
    """Upgrade tests: obsolete database versions must be rejected."""

    def setUp(self):
        super(TestSqliteCacheDB, self).setUp()

        # Dummy session with every optional subsystem disabled.
        self.config = SessionStartupConfig()
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_checking(False)
        self.config.set_multicast_local_peer_discovery(False)
        self.config.set_megacache(False)
        self.config.set_dispersy(False)
        self.config.set_mainline_dht(False)
        self.config.set_torrent_collecting(False)
        self.config.set_libtorrent(False)
        self.config.set_dht_torrent_collecting(False)
        self.config.set_videoplayer(False)
        self.session = Session(self.config, ignore_singleton=True)
        self.sqlitedb = None

    def tearDown(self):
        super(TestSqliteCacheDB, self).tearDown()

        if self.sqlitedb:
            self.sqlitedb.close()
        self.sqlitedb = None
        self.session.del_instance()
        self.session = None

    def test_upgrade_from_obsolete_version(self):
        """We no longer support DB versions older than 17 (Tribler 6.0)"""
        dbpath = init_bak_tribler_sdb(u"bak_old_tribler.sdb", destination_path=self.getStateDir(), overwrite=True)

        self.sqlitedb = SQLiteCacheDB(self.session)
        self.sqlitedb.initialize(dbpath)

        class MockTorrentStore(object):
            # FIX: the original methods were declared without `self`, so any
            # call on a MockTorrentStore instance would raise TypeError
            # instead of being a harmless no-op.
            def flush(self):
                pass

            def close(self):
                pass

        db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore())
        self.assertRaises(VersionNoLongerSupportedError, db_migrator.start_migrate)

    def test_upgrade_from_17(self):
        pass
def setUp(self):
    # Generator-style setUp: presumably decorated with @inlineCallbacks in
    # the original file.
    yield super(AbstractDB, self).setUp()
    self.setUpPreSession()
    self.session = Session(self.config, ignore_singleton=True)

    # Unpack the baked database fixture next to the session dir.
    tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz')
    tar.extractall(self.session_base_dir)
    db_path = os.path.join(self.session_base_dir, 'bak_new_tribler.sdb')
    db_script_path = os.path.join(get_lib_path(), DB_SCRIPT_NAME)

    self.sqlitedb = SQLiteCacheDB(db_path, db_script_path, busytimeout=BUSYTIMEOUT)
    self.sqlitedb.initialize()
    self.session.sqlite_db = self.sqlitedb
def read_data(self):
    """Exercise fetchone/fetchall/size/insert/update against the shared
    'person' table (seeded elsewhere with one sentinel row + 100 rows)."""
    db = SQLiteCacheDB.getInstance()
    one = db.fetchone('select * from person')
    assert one == ('a', 'b'), str(one)
    one = db.fetchone("select lastname from person where firstname == 'b'")
    assert one == 'a'
    one = db.fetchone("select lastname from person where firstname == 'c'")
    # FIX(idiom): identity test for None instead of `== None`.
    assert one is None
    # FIX(idiom): renamed `all` -> `rows`; the original shadowed the builtin.
    rows = db.fetchall('select * from person')
    assert len(rows) == 101, len(rows)
    num = db.size('person')
    assert num == 101
    db.insert('person', lastname='1', firstname='abc')
    one = db.fetchone("select firstname from person where lastname == '1'")
    assert one == '1' or one == 'abc'
    rows = db.fetchall("select firstname from person where lastname == '1'")
    assert len(rows) == 2
    db.update('person', "lastname == '2'", firstname='56')
    one = db.fetchone("select firstname from person where lastname == '2'")
    assert one == '56', one
    db.update('person', "lastname == '3'", firstname=65)
    one = db.fetchone("select firstname from person where lastname == '3'")
    assert one == 65, one
    db.update('person', "lastname == '4'", firstname=654, lastname=44)
    one = db.fetchone("select firstname from person where lastname == 44")
    assert one == 654, one
    db.close()
def setUp(self):
    """Open the fixture database and pin two known peer permids plus one
    friend permid (Python 2 byte strings)."""
    db_path = TRIBLER_DB_PATH
    db = SQLiteCacheDB.getInstance()
    db.openDB(db_path, lib=LIB, autocommit=AUTOCOMMIT, busytimeout=BUSYTIMEOUT)
    # Raw EC public keys of two superpeers present in the fixture.
    self.sp1 = '0R0\x10\x06\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04\x00\\\xdfXv\xffX\xf2\xfe\x96\xe1_]\xf5\x1b\xb4\x91\x91\xa5I\xf0nl\x81\xd2A\xfb\xb7u)\x01T\xa9*)r\x9b\x81s\xb7j\xd2\xecrSg$;\xc8"7s\xecSF\xd3\x0bgK\x1c'
    self.sp2 = '0R0\x10\x06\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04\x01\xdb\x80+O\xd9N7`\xfc\xd3\xdd\xf0 \xfdC^\xc9\xd7@\x97\xaa\x91r\x1c\xdeL\xf2n\x9f\x00U\xc1A\xf9Ae?\xd8t}_c\x08\xb3G\xf8g@N! \xa0\x90M\xfb\xca\xcfZ@'
    # Base64-encoded friend permid, decoded to raw bytes.
    self.fr1 = str2bin('MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAL/l2IyVa6lc3KAqQyEnR++rIzi+AamnbzXHCxOFAFy67COiBhrC79PLzzUiURbHDx21QA4p8w3UDHLA')
def setUp(self):
    # Open the shared Tribler fixture database via the singleton wrapper.
    db_path = TRIBLER_DB_PATH
    db = SQLiteCacheDB.getInstance()
    db.openDB(db_path, lib=LIB, autocommit=AUTOCOMMIT, busytimeout=BUSYTIMEOUT)
def __init__(self):
    # Grab the shared database handle; emit a start marker only when this
    # process is actually running as a crawler.
    self._sqlite_cache_db = SQLiteCacheDB.getInstance()

    crawler = Crawler.get_instance()
    if crawler.am_crawler():
        msg = "# Crawler started"
        self.__log(msg)
def setUp(self):
    # Generator-style setUp: presumably decorated with @inlineCallbacks in
    # the original file -- the bare yield drives the parent setUp deferred.
    yield super(TestSqliteCacheDB, self).setUp()

    db_path = u":memory:"  # in-memory database; nothing touches disk
    self.sqlite_test = SQLiteCacheDB(db_path)
    self.sqlite_test.set_show_sql(True)  # echo SQL statements for debugging
def setUp(self):
    # Initialize the singleton database and wire fake launchmany/overlay
    # plumbing around the DataHandler under test.
    db_path = TRIBLER_DB_PATH
    db = SQLiteCacheDB.getInstance()
    db.initDB(db_path, busytimeout=BUSYTIMEOUT)
    launchmany = FakeLaunchmany(db)
    overlay_bridge = FakeOverlayBridge()
    self.datahandler = DataHandler(launchmany, overlay_bridge)
def read_data(self):
    """Exercise fetchone/fetchall/size/insert/update against the shared
    'person' table (seeded elsewhere with one sentinel row + 100 rows)."""
    db = SQLiteCacheDB.getInstance()
    one = db.fetchone('select * from person')
    assert one == ('a','b'), str(one)
    one = db.fetchone("select lastname from person where firstname == 'b'")
    assert one == 'a'
    one = db.fetchone("select lastname from person where firstname == 'c'")
    assert one == None
    # NOTE(review): `all` shadows the builtin of the same name.
    all = db.fetchall('select * from person')
    assert len(all) == 101, len(all)
    num = db.size('person')
    assert num == 101
    db.insert('person', lastname='1', firstname='abc')
    one = db.fetchone("select firstname from person where lastname == '1'")
    assert one == '1' or one == 'abc'
    all = db.fetchall("select firstname from person where lastname == '1'")
    assert len(all) == 2
    db.update('person', "lastname == '2'", firstname='56')
    one = db.fetchone("select firstname from person where lastname == '2'")
    assert one == '56', one
    db.update('person', "lastname == '3'", firstname=65)
    one = db.fetchone("select firstname from person where lastname == '3'")
    assert one == 65, one
    db.update('person', "lastname == '4'", firstname=654, lastname=44)
    one = db.fetchone("select firstname from person where lastname == 44")
    assert one == 654, one
    db.close()
def setUp(self):
    # Initialize the singleton database and wire fake launchmany/overlay
    # plumbing around the DataHandler under test.
    db_path = TRIBLER_DB_PATH
    db = SQLiteCacheDB.getInstance()
    db.initDB(db_path, busytimeout=BUSYTIMEOUT)
    launchmany = FakeLaunchmany(db)
    overlay_bridge = FakeOverlayBridge()
    self.datahandler = DataHandler(launchmany, overlay_bridge)
def setUp(self):
    self.setUpCleanup()
    # Restore the baked database into the state dir and wait until any
    # schema upgrade finishes before the tests run.
    dbpath = init_bak_tribler_sdb('bak_new_tribler.sdb', destination_path=self.getStateDir(), overwrite=True)
    self.sqlitedb = SQLiteCacheDB.getInstance()
    self.sqlitedb.initDB(dbpath, busytimeout=BUSYTIMEOUT)
    self.sqlitedb.waitForUpdateComplete()
def test_open_close_db(self):
    """Open/close cycles with different busy timeouts."""
    sqlite_test = SQLiteCacheDB.getInstance()
    sqlite_test.openDB(self.db_path, 1250)
    sqlite_test.close()
    # The second close() checks that closing an already-closed handle is a
    # no-op rather than an error.
    sqlite_test.close()
    sqlite_test.openDB(self.db_path, 0)
    sqlite_test.close()
def setUp(self):
    super(TestContentRepositoryWithRealDatabase, self).setUp()

    # Unpack the baked Tribler database fixture into a scratch directory.
    session_base_dir = self.temporary_directory()
    tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz')
    tar.extractall(session_base_dir)
    db_path = os.path.join(session_base_dir, 'bak_new_tribler.sdb')
    self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT)

    # Minimal fake session: only the attributes TorrentDBHandler touches.
    session = MockObject()
    session.sqlite_db = self.sqlitedb
    session.notifier = MockObject()
    self.torrent_db = TorrentDBHandler(session)

    channel_db = MockObject()
    self.content_repository = ContentRepository(self.torrent_db, channel_db)
def keep_reading_data(self, period):
    """Hammer the shared 'person' table with reads for `period` seconds,
    counting successful reads and busy-lock failures (Python 2 code)."""
    db = SQLiteCacheDB.getInstance()
    st = time()
    oldnum = 0  # NOTE(review): assigned but never used
    self.all = []
    self.read_times = 0
    if DEBUG_R:
        print "begin read", self.getName(), period, time()
    while True:
        et = time()
        if et - st > period:
            break
        if DEBUG_R:
            print "...start read", self.getName(), time()
            sys.stdout.flush()
        try:
            self.all = db.fetchall("select * from person")
            self.last_read = time() - st
            self.read_times += 1
        except Exception, msg:
            # A busy/locked database counts as a read lock, not a failure.
            print_exc()
            print "*-*", Exception, msg
            self.read_locks += 1
            if DEBUG:
                print >> sys.stdout, "Locked while reading!", self.read_locks
                sys.stdout.flush()
        else:
            if DEBUG_R:
                print "...end read", self.getName(), time(), len(self.all)
                sys.stdout.flush()
def __init__(self):
    # Grab the shared database handle; emit a start marker only when this
    # process is actually running as a crawler.
    self._sqlite_cache_db = SQLiteCacheDB.getInstance()

    crawler = Crawler.get_instance()
    if crawler.am_crawler():
        msg = "# Crawler started"
        self.__log(msg)
def test_open_close_db(self):
    """Open/close cycles with different busy timeouts."""
    sqlite_test = SQLiteCacheDB.getInstance()
    sqlite_test.openDB(self.db_path, 1250)
    sqlite_test.close()
    # The second close() checks that closing an already-closed handle is a
    # no-op rather than an error.
    sqlite_test.close()
    sqlite_test.openDB(self.db_path, 0)
    sqlite_test.close()
def keep_reading_data(self, period):
    """Hammer the shared 'person' table with reads for `period` seconds,
    counting successful reads and busy-lock failures (Python 2 code)."""
    db = SQLiteCacheDB.getInstance()
    st = time()
    oldnum = 0  # NOTE(review): assigned but never used
    self.all = []
    self.read_times = 0
    if DEBUG_R:
        print "begin read", self.getName(), period, time()
    while True:
        et = time()
        if et-st > period:
            break
        if DEBUG_R:
            print "...start read", self.getName(), time()
            sys.stdout.flush()
        try:
            self.all = db.fetchall("select * from person")
            self.last_read = time()-st
            self.read_times += 1
        except Exception, msg:
            # A busy/locked database counts as a read lock, not a failure.
            print_exc()
            print "*-*", Exception, msg
            self.read_locks += 1
            if DEBUG:
                print >> sys.stdout, "Locked while reading!", self.read_locks
                sys.stdout.flush()
        else:
            if DEBUG_R:
                print "...end read", self.getName(), time(), len(self.all)
                sys.stdout.flush()
def test_open_db_script_file(self):
    """Full lifecycle: initialize with the packaged script, then round-trip
    a version number through write_version()/version."""
    sqlite_test_2 = SQLiteCacheDB(os.path.join(self.session_base_dir, "test_db.db"), self.tribler_db_script)
    sqlite_test_2.initialize()
    sqlite_test_2.initial_begin()
    sqlite_test_2.write_version(4)
    self.assertEqual(sqlite_test_2.version, 4)
def test_perform_upgrade(self):
    # Restore an *old* database so initDB triggers the upgrade path, then
    # block until the upgrade completes.
    dbpath = init_bak_tribler_sdb('bak_old_tribler.sdb', destination_path=self.getStateDir(), overwrite=True)
    self.sqlitedb = SQLiteCacheDB.getInstance()
    self.sqlitedb.initDB(dbpath)
    self.sqlitedb.waitForUpdateComplete()
def start_database(self):
    """
    Start the SQLite database.
    """
    # NOTE(review): unlike sibling variants of this method, only the wrapper
    # is constructed here -- initialize()/initial_begin() are presumably
    # invoked later in the startup sequence; confirm.
    db_path = os.path.join(self.config.get_state_dir(), DB_FILE_RELATIVE_PATH)
    self.sqlite_db = SQLiteCacheDB(db_path)
    self.readable_status = STATE_OPEN_DB
class TestSqliteCacheDB(AbstractServer):
    """Upgrade tests: obsolete database versions must be rejected."""

    def setUp(self):
        super(TestSqliteCacheDB, self).setUp()

        # Dummy session with every optional subsystem disabled.
        self.config = SessionStartupConfig()
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_checking(False)
        self.config.set_multicast_local_peer_discovery(False)
        self.config.set_megacache(False)
        self.config.set_dispersy(False)
        self.config.set_mainline_dht(False)
        self.config.set_torrent_collecting(False)
        self.config.set_libtorrent(False)
        self.config.set_dht_torrent_collecting(False)
        self.config.set_videoplayer(False)
        self.session = Session(self.config, ignore_singleton=True)
        self.sqlitedb = None

    def tearDown(self):
        super(TestSqliteCacheDB, self).tearDown()

        if self.sqlitedb:
            self.sqlitedb.close()
        self.sqlitedb = None
        self.session.del_instance()
        self.session = None

    def test_upgrade_from_obsolete_version(self):
        """We no longer support DB versions older than 17 (Tribler 6.0)"""
        dbpath = init_bak_tribler_sdb(u"bak_old_tribler.sdb", destination_path=self.getStateDir(), overwrite=True)

        self.sqlitedb = SQLiteCacheDB(self.session)
        self.sqlitedb.initialize(dbpath)

        class MockTorrentStore(object):
            # FIX: the original methods were declared without `self`, so any
            # call on a MockTorrentStore instance would raise TypeError
            # instead of being a harmless no-op.
            def flush(self):
                pass

            def close(self):
                pass

        db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore())
        self.assertRaises(VersionNoLongerSupportedError, db_migrator.start_migrate)

    def test_upgrade_from_17(self):
        pass
def keep_writing_data(self, period, num_write, commit=False):
    """Hammer the shared 'person' table with batched inserts for `period`
    seconds, counting writes and busy-lock retries (Python 2 code)."""
    db = SQLiteCacheDB.getInstance()
    st = time()
    if DEBUG:
        print "begin write", self.getName(), period, time()
        sys.stdout.flush()
    begin_time = time()
    w_times = []
    c_times = []  # NOTE(review): populated nowhere in this method
    self.last_write = 0
    try:
        while True:
            st = time()
            if st - begin_time > period:
                break
            #db.begin()
            # Values deliberately contain quote characters to exercise
            # parameter binding.
            values = []
            for i in range(num_write):
                value = (str(i) + '"' + "'", str(i ** 2) + '"' + "'")
                values.append(value)
            try:
                st = time()
                if DEBUG:
                    print '-' + self.num + "start write", self.getName(), self.writes, time() - begin_time
                    sys.stdout.flush()
                sql = 'INSERT INTO person VALUES (?, ?)'
                db.executemany(sql, values, commit=commit)
                self.last_write = time() - begin_time
                write_time = time() - st
                w_times.append(write_time)
                if DEBUG:
                    print '-' + self.num + "end write", self.getName(), '+', write_time
                    sys.stdout.flush()
                self.writes += 1
            except apsw.BusyError:
                # Another thread holds the write lock; count and retry.
                self.write_locks += 1
                if DEBUG:
                    if commit:
                        s = "Writing/Commiting"
                    else:
                        s = "Writing"
                    print >> sys.stdout, '>' + self.num + "Locked while ", s, self.getName(), self.write_locks, time() - st
                    sys.stdout.flush()
                continue
            if SLEEP_W >= 0:
                sleep(SLEEP_W / 1000.0)
            if DO_STH > 0:
                do_sth(DO_STH)
    except Exception, msg:
        print_exc()
        print >> sys.stderr, "On Error", time(), begin_time, time() - begin_time, Exception, msg, self.getName()
def setUp(self):
    super(TestSqliteCacheDB, self).setUp()

    # Dummy session with every optional subsystem disabled.
    self.config = SessionStartupConfig()
    self.config.set_state_dir(self.getStateDir())
    self.config.set_torrent_checking(False)
    self.config.set_multicast_local_peer_discovery(False)
    self.config.set_megacache(False)
    self.config.set_dispersy(False)
    self.config.set_mainline_dht(False)
    self.config.set_torrent_collecting(False)
    self.config.set_libtorrent(False)
    self.config.set_dht_torrent_collecting(False)
    self.config.set_videoplayer(False)
    self.session = Session(self.config, ignore_singleton=True)

    # In-memory database; nothing touches disk.
    self.sqlite_test = SQLiteCacheDB(self.session)
    self.db_path = u":memory:"
    self.sqlite_test.initialize(self.db_path)
def setUp(self):
    """Open the fixture database, pin two known peer permids, and assert a
    fabricated permid is absent (Python 2 byte strings)."""
    db_path = TRIBLER_DB_PATH
    db = SQLiteCacheDB.getInstance()
    db.openDB(db_path, lib=LIB, autocommit=AUTOCOMMIT, busytimeout=BUSYTIMEOUT)
    # Raw EC public keys of two superpeers present in the fixture.
    self.sp1 = '0R0\x10\x06\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04\x00\\\xdfXv\xffX\xf2\xfe\x96\xe1_]\xf5\x1b\xb4\x91\x91\xa5I\xf0nl\x81\xd2A\xfb\xb7u)\x01T\xa9*)r\x9b\x81s\xb7j\xd2\xecrSg$;\xc8"7s\xecSF\xd3\x0bgK\x1c'
    self.sp2 = '0R0\x10\x06\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04\x01\xdb\x80+O\xd9N7`\xfc\xd3\xdd\xf0 \xfdC^\xc9\xd7@\x97\xaa\x91r\x1c\xdeL\xf2n\x9f\x00U\xc1A\xf9Ae?\xd8t}_c\x08\xb3G\xf8g@N! \xa0\x90M\xfb\xca\xcfZ@'
    # A permid guaranteed not to be in the fixture database.
    fake_permid_x = 'fake_permid_x'+'0R0\x10\x00\x07*\x86H\xce=\x02\x01\x06\x05+\x81\x04\x00\x1a\x03>\x00\x04'
    hp = db.hasPeer(fake_permid_x)
    assert not hp
def create_db(self, db_path, busytimeout=DEFAULT_BUSY_TIMEOUT):
    """Create a fresh database at db_path containing an empty 'person' table.

    :param db_path: filesystem path for the new database file.
    :param busytimeout: SQLite busy timeout passed through to initDB.
    """
    import tempfile

    create_sql = "create table person(lastname, firstname);"
    db = SQLiteCacheDB.getInstance()
    # FIX: write the bootstrap script to a unique temporary file instead of a
    # hard-coded 'tmp.sql' in the current working directory (race-prone when
    # tests run concurrently, and it could clobber an existing file), and
    # guarantee cleanup even if initDB raises.
    fd, tmp_sql_path = tempfile.mkstemp(suffix='.sql')
    try:
        with os.fdopen(fd, 'w') as script_file:
            script_file.write(create_sql)
        db.initDB(db_path, tmp_sql_path, busytimeout=busytimeout, check_version=False)
    finally:
        os.remove(tmp_sql_path)
def __init__(self):
    # The log file handle is kept open for the object's lifetime in
    # self._file; it is None when this process is not running as a crawler.
    self._sqlite_cache_db = SQLiteCacheDB.getInstance()

    crawler = Crawler.get_instance()
    if crawler.am_crawler():
        # Append a timestamped session-start banner to the crawler log.
        self._file = open("databasecrawler.txt", "a")
        self._file.write("".join(("# ", "*" * 80, "\n# ", strftime("%Y/%m/%d %H:%M:%S"), " Crawler started\n")))
        self._file.flush()
    else:
        self._file = None
def test_integrity_check_triggered(self):
    """ Tests if integrity check is triggered if temporary rollback files are present."""
    def do_integrity_check(_):
        do_integrity_check.called = True

    # No -shm file present: constructing the wrapper must not run the check.
    db_path = os.path.join(self.session_base_dir, "test_db.db")
    sqlite_test = SQLiteCacheDB(db_path)
    sqlite_test.do_quick_integrity_check = do_integrity_check
    do_integrity_check.called = False
    self.assertFalse(do_integrity_check.called)

    # A leftover -shm (WAL shared-memory) file next to the database must
    # trigger the integrity check during construction.
    db_path2 = os.path.join(self.session_base_dir, "test_db2.db")
    wal_file = open(os.path.join(self.session_base_dir, "test_db2.db-shm"), 'w')
    wal_file.close()
    do_integrity_check.called = False
    # FIX: the original monkey-patched the *class* attribute and never
    # restored it, leaking the stub into every later test in the process.
    original_check = SQLiteCacheDB.do_quick_integrity_check
    SQLiteCacheDB.do_quick_integrity_check = do_integrity_check
    try:
        sqlite_test_2 = SQLiteCacheDB(db_path2)
        self.assertTrue(do_integrity_check.called)
    finally:
        SQLiteCacheDB.do_quick_integrity_check = original_check
def tearDown(self):
    """ unittest test tear down code """
    self.annotate(self._testMethodName, start=False)
    # Shut down the session if one was started; the sleep gives background
    # threads time to finish before we enumerate stragglers below.
    # NOTE(review): nesting reconstructed from flattened source -- the
    # del_instance/sleep are presumed to belong inside the if; confirm.
    if self.session is not None:
        self._shutdown_session(self.session)
        Session.del_instance()
        time.sleep(10)

    gc.collect()

    # Report any threads that survived shutdown (Python 2 print syntax).
    ts = enumerate_threads()
    print >> sys.stderr, "test_as_server: Number of threads still running", len(ts)
    for t in ts:
        print >> sys.stderr, "test_as_server: Thread still running", t.getName(), "daemon", t.isDaemon(), "instance:", t

    if SQLiteCacheDB.hasInstance():
        SQLiteCacheDB.getInstance().close_all()
        SQLiteCacheDB.delInstance()

    self.tearDownCleanup()
def setUp(self):
    # Open the backup fixture database and pin known base64 permids keyed
    # by their peer ids in the fixture.
    self.db = SQLiteCacheDB.getInstance()
    self.db.initDB(TRIBLER_DB_PATH_BACKUP, lib=0)

    permid = {}
    permid[3127] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAcPezgQ13k1MSOaUrCPisWRhYuNT7Tm+q5rUgHFvAWd9b+BcSut6TCniEgHYHDnQ6TH/vxQBqtY8Loag'
    permid[994] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAJUNmwvDaigRaM4cj7cE2O7lessqnnFEQsan7df9AZS8xeNmVsP/XXVrEt4t7e2TNicYmjn34st/sx2P'
    permid[19] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAAJv2YLuIWa4QEdOEs4CPRxQZDwZphKd/xK/tgbcALG198nNdT10znJ2sZYl+OJIvj7YfYp75PrrnWNX'
    permid[5] = 'MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAAB0XbUrw5b8CrTrMZST1SPyrzjgSzIE6ynALtlZASGAb+figVXRRGpKW6MSal3KnEm1/q0P3JPWrhCE'
    self.permid = permid

    # Preload the preference handler's data from the database.
    db = MyPreferenceDBHandler.getInstance()
    db.loadData()
def setUp(self):
    # NOTE(review): tempfile.mktemp() is race-prone (file is not created);
    # kept as-is since the handlers below create the files themselves.
    self.file_path = tempfile.mktemp()
    self.db_path = tempfile.mktemp()
    #print >>sys.stderr,"test: file_path",self.file_path
    #print >>sys.stderr,"test: db_path",self.db_path

    self.writeSuperPeers()

    # The handler expects install_dir + superpeer_file to rejoin to file_path.
    head, tail = os.path.split(self.file_path)
    self.config = {'install_dir': head, 'superpeer_file': tail}

    self.db = SQLiteCacheDB.getInstance()
    self.db.initDB(self.db_path, CREATE_SQL_FILE, check_version=False)
    self.splist = SuperPeerDBHandler.getInstance()
def write_data(self): db = SQLiteCacheDB.getInstance() #db.begin() db.insert('person', lastname='a', firstname='b') values = [] for i in range(100): value = (str(i), str(i**2)) values.append(value) db.insertMany('person', values) db.commit() #db.begin() db.commit() db.commit() db.close()
class AbstractDB(TriblerCoreTest):
    """Base test case: a Session with all optional subsystems disabled plus
    a restored baked Tribler database attached as session.sqlite_db."""

    def setUpPreSession(self):
        # Disable every optional component so only the database is active.
        self.config = TriblerConfig(ConfigObj(configspec=CONFIG_SPEC_PATH))
        self.config.set_state_dir(self.getStateDir())
        self.config.set_torrent_checking_enabled(False)
        self.config.set_megacache_enabled(False)
        self.config.set_dispersy_enabled(False)
        self.config.set_mainline_dht_enabled(False)
        self.config.set_torrent_collecting_enabled(False)
        self.config.set_libtorrent_enabled(False)
        self.config.set_video_server_enabled(False)
        self.config.set_torrent_store_enabled(False)

    @inlineCallbacks
    def setUp(self):
        yield super(AbstractDB, self).setUp()
        self.setUpPreSession()
        self.session = Session(self.config)

        # Unpack the baked database fixture next to the session dir.
        tar = tarfile.open(os.path.join(TESTS_DATA_DIR, 'bak_new_tribler.sdb.tar.gz'), 'r|gz')
        tar.extractall(self.session_base_dir)
        db_path = os.path.join(self.session_base_dir, 'bak_new_tribler.sdb')

        self.sqlitedb = SQLiteCacheDB(db_path, busytimeout=BUSYTIMEOUT)
        self.session.sqlite_db = self.sqlitedb

    @inlineCallbacks
    def tearDown(self):
        self.sqlitedb.close()
        self.sqlitedb = None
        self.session = None
        yield super(AbstractDB, self).tearDown()
def basic_funcs(self):
    """Smoke-test createDBTable/insert/fetch/update on a scratch 'person' table."""
    db = SQLiteCacheDB.getInstance()
    create_sql = "create table person(lastname, firstname);"
    db.createDBTable(create_sql, self.db_path)
    db.insert('person', lastname='a', firstname='b')
    one = db.fetchone('select * from person')
    assert one == ('a','b')
    one = db.fetchone("select lastname from person where firstname == 'b'")
    assert one == 'a'
    one = db.fetchone("select lastname from person where firstname == 'c'")
    assert one == None
    values = []
    for i in range(100):
        value = (str(i), str(i**2))
        values.append(value)
    db.insertMany('person', values)
    # NOTE(review): `all` shadows the builtin of the same name.
    all = db.fetchall('select * from person')
    assert len(all) == 101
    all = db.fetchall("select * from person where lastname=='101'")
    assert all == []
    num = db.size('person')
    assert num == 101
    db.insert('person', lastname='1', firstname='abc')
    one = db.fetchone("select firstname from person where lastname == '1'")
    assert one == '1' or one == 'abc'
    all = db.fetchall("select firstname from person where lastname == '1'")
    assert len(all) == 2
    db.update('person', "lastname == '2'", firstname='56')
    one = db.fetchone("select firstname from person where lastname == '2'")
    assert one == '56', one
    db.update('person', "lastname == '3'", firstname=65)
    one = db.fetchone("select firstname from person where lastname == '3'")
    assert one == 65, one
    db.update('person', "lastname == '4'", firstname=654, lastname=44)
    one = db.fetchone("select firstname from person where lastname == 44")
    assert one == 654, one
    db.close()
def test_upgrade_from_obsolete_version(self):
    """We no longer support DB versions older than 17 (Tribler 6.0)"""
    dbpath = init_bak_tribler_sdb(u"bak_old_tribler.sdb", destination_path=self.getStateDir(), overwrite=True)

    self.sqlitedb = SQLiteCacheDB(self.session)
    self.sqlitedb.initialize(dbpath)

    class MockTorrentStore(object):
        # FIX: the original methods were declared without `self`, so any
        # call on a MockTorrentStore instance would raise TypeError instead
        # of being a harmless no-op.
        def flush(self):
            pass

        def close(self):
            pass

    db_migrator = DBUpgrader(self.session, self.sqlitedb, torrent_store=MockTorrentStore())
    self.assertRaises(VersionNoLongerSupportedError, db_migrator.start_migrate)