def load_file(track, filesystem_monitor: FilesystemMonitor):
    """
    Loads a given track into the player.

    :param track: track to be loaded
    """
    global __current_track
    global __player

    if get_gst_player_state() == Gst.State.PLAYING:
        save_current_track_position()
        save_current_book_position(__current_track)

    __current_track = track
    emit_event("stop")
    __player.set_state(Gst.State.NULL)
    init()

    if filesystem_monitor.is_track_online(track):
        path = track.file
    else:
        path = OfflineCache().get_cached_path(track)
        if not path:
            path = track.file

    __player.set_property("uri", "file://" + path)
    __player.set_state(Gst.State.PAUSED)

    save_current_book_position(__current_track)
    Settings.update(last_played_book=__current_track.book).execute()
    Book.update(last_played=int(time.time())).where(
        Book.id == __current_track.book.id).execute()

    emit_event("track-changed", track)
def init_db():
    _connect_db(_db)
    sqlite_version = ".".join([str(num) for num in _db.server_version])
    log.info("SQLite version: {}".format(sqlite_version))

    if Settings.table_exists():
        update_db()
    else:
        _db.create_tables([
            Track, Book, Settings, ArtworkCache, Storage, StorageBlackList,
            OfflineCache
        ])
        _db.stop()
        _db.start()

        while not _db.table_exists("settings"):
            time.sleep(0.01)

    _db.bind([
        Book, Track, Settings, ArtworkCache, StorageBlackList, OfflineCache,
        Storage
    ], bind_refs=False, bind_backrefs=False)

    if Settings.select().count() == 0:
        Settings.create(path="", last_played_book=None)

    # TODO: Properly handle errors within the database
    # Remove this later. It prevents empty book objects in the database
    clean_books()
def peewee_database():
    from cozy.db.track import Track
    from cozy.db.book import Book
    from cozy.db.settings import Settings
    from cozy.db.storage_blacklist import StorageBlackList
    from cozy.db.storage import Storage

    db_path, models, test_db = prepare_db()
    path_of_test_folder = os.path.dirname(os.path.realpath(__file__)) + '/'

    with open(path_of_test_folder + 'books.json') as json_file:
        book_data = json.load(json_file)

    with open(path_of_test_folder + 'tracks.json') as json_file:
        track_data = json.load(json_file)

    Book.insert_many(book_data).execute()
    for chunk in chunks(track_data, 25):
        Track.insert_many(chunk).execute()

    with open(path_of_test_folder + 'storages.json') as json_file:
        storage_data = json.load(json_file)

    Storage.insert_many(storage_data).execute()

    Settings.create(path="", last_played_book=Book.get())
    StorageBlackList.create(path="/path/to/replace/test1.mp3")
    StorageBlackList.create(path="/path/to/not/replace/test2.mp3")

    print("Provide database...")

    yield test_db

    teardown_db(db_path, models, test_db)
def __update_db_3(db):
    current_path = Settings.get().path

    db.create_tables([Storage])
    Storage.create(path=current_path, default=True)
    Settings.update(path="NOT_USED").execute()
    Settings.update(version=3).execute()
def next_track():
    """
    Play the next track of the current book.
    Stops playback if there isn't any.
    """
    global __current_track
    global __play_next

    album_tracks = get_tracks(get_current_track().book)
    current = get_current_track()
    index = list(album_tracks).index(current)
    next_track = None
    if index + 1 < len(album_tracks):
        next_track = album_tracks[index + 1]

    play_pause(None)
    save_current_track_position(0)

    if next_track:
        save_current_book_position(next_track)
        save_current_track_position(0, next_track)
        if __play_next:
            play_pause(next_track)
        else:
            load_file(next_track)
            __play_next = True
    else:
        stop()
        save_current_book_position(current, -1)
        unload()
        Settings.update(last_played_book=None).execute()
        emit_event("stop")
def __update_db_4(db):
    migrator = SqliteMigrator(db)

    last_played = IntegerField(default=0)

    migrate(
        migrator.add_column('book', 'last_played', last_played),
    )

    Settings.update(version=4).execute()
def __update_db_2(db):
    migrator = SqliteMigrator(db)

    playback_speed = FloatField(default=1.0)

    migrate(
        migrator.add_column('book', 'playback_speed', playback_speed),
    )

    Settings.update(version=2).execute()
def __update_db_8(db):
    db.execute_sql('UPDATE track SET modified=0 WHERE crc32=1')

    migrator: SqliteMigrator = SqliteMigrator(db)

    migrate(
        migrator.drop_column("track", "crc32")
    )

    Settings.update(version=8).execute()
def clean_books():
    """
    Remove all books that have no tracks.
    """
    for book in Book.select():
        if not get_track_for_playback(book):
            Book.update(position=0).where(Book.id == book.id).execute()
        if Track.select().where(Track.book == book).count() < 1:
            if Settings.get().last_played_book and Settings.get().last_played_book.id == book.id:
                Settings.update(last_played_book=None).execute()
            book.delete_instance()
def test_fetching_non_existent_last_played_book_sets_it_to_none(peewee_database):
    from cozy.model.settings import Settings
    from cozy.db.settings import Settings as SettingsModel

    db_object = SettingsModel.get()
    db_object.last_played_book = 437878782
    db_object.save(only=db_object.dirty_fields)

    settings = Settings()
    # Accessing the property triggers the fetch that should reset the dangling reference
    dummy = settings.last_played_book

    assert SettingsModel.get().last_played_book is None
def _update_db_10(db):
    log.info("Migrating to DB Version 10...")

    models = generate_models(db)
    migrator: SqliteMigrator = SqliteMigrator(db)

    if "track" in models["offlinecache"]._meta.sorted_field_names:
        log.info("Drop in OfflineCache: track_id...")
        migrate(migrator.drop_column("offlinecache", "track_id"))

    db.stop()
    db.start()

    Settings.update(version=10).execute()
def init_db():
    tmp_db = None

    _connect_db(_db)
    sqlite_version = ".".join([str(num) for num in _db.server_version])
    log.info("SQLite version: {}, APSW version: {}".format(
        sqlite_version, apswversion()))

    if Settings.table_exists():
        update_db()
    else:
        tmp_db = PooledSqliteDatabase(os.path.join(get_data_dir(), "cozy.db"))
        if PeeweeVersion[0] == '2':
            tmp_db.create_tables([
                Track, Book, Settings, ArtworkCache, Storage,
                StorageBlackList, OfflineCache
            ], True)
        else:
            with tmp_db.connection_context():
                tmp_db.create_tables([
                    Track, Book, Settings, ArtworkCache, Storage,
                    StorageBlackList, OfflineCache
                ])

    # this is necessary to ensure that the tables have indeed been created
    if tmp_db:
        if PeeweeVersion[0] == '2':
            while not Settings.table_exists():
                time.sleep(0.01)
        else:
            while not tmp_db.table_exists("settings"):
                time.sleep(0.01)

    _connect_db(_db)

    if PeeweeVersion[0] == '3':
        _db.bind([
            Book, Track, Settings, ArtworkCache, StorageBlackList,
            OfflineCache, Storage
        ], bind_refs=False, bind_backrefs=False)

    if Settings.select().count() == 0:
        Settings.create(path="", last_played_book=None)

    # TODO: Properly handle errors within the database
    # Remove this later. It prevents empty book objects in the database
    clean_books()
def load_last_book(filesystem_monitor: FilesystemMonitor):
    """
    Load the last played book into the player.
    """
    global __current_track
    global __player

    last_book = Settings.get().last_played_book

    if last_book and last_book.position != 0:
        query = Track.select().where(Track.id == last_book.position)
        if query.exists():
            last_track = query.get()
            if last_track:
                __player.set_state(Gst.State.NULL)
                if filesystem_monitor.is_track_online(last_track):
                    path = last_track.file
                else:
                    path = OfflineCache().get_cached_path(last_track)
                    if not path:
                        return
                __player.set_property("uri", "file://" + path)
                __player.set_state(Gst.State.PAUSED)
                __current_track = last_track

                Book.update(last_played=int(time.time())).where(
                    Book.id == last_book.id).execute()

                emit_event("track-changed", last_track)
def __update_db_6(db):
    migrator = SqliteMigrator(db)

    db.create_tables([OfflineCache])

    external = BooleanField(default=False)
    offline = BooleanField(default=False)
    downloaded = BooleanField(default=False)

    migrate(
        migrator.add_column('storage', 'external', external),
        migrator.add_column('book', 'offline', offline),
        migrator.add_column('book', 'downloaded', downloaded)
    )

    Settings.update(version=6).execute()

    import shutil
    shutil.rmtree(get_cache_dir())
def do_activate(self):
    main_window_builder = self.ui.get_builder()
    self.app_controller = AppController(self, main_window_builder, self.ui)

    self.ui.activate(self.app_controller.library_view)

    if Settings.get().first_start:
        Settings.update(first_start=False).execute()
        path = os.path.join(Path.home(), _("Audiobooks"))
        Storage.create(path=path, default=True)
        os.makedirs(path, exist_ok=True)

    self.add_window(self.ui.window)

    mpris = MPRIS(self)
    mpris._on_current_changed()
def do_activate(self):
    main_window_builder = self.ui.get_builder()
    self.app_controller = AppController(main_window_builder, self.ui)

    self.ui.activate(self.app_controller.library_view)

    if Settings.get().first_start:
        Settings.update(first_start=False).execute()
        path = str(Path.home()) + "/Audiobooks"
        Settings.update(path=str(Path.home()) + "/Audiobooks").execute()
        if not os.path.exists(path):
            os.makedirs(path)

    self.add_window(self.ui.window)

    mpris = MPRIS(self)
    mpris._on_current_changed(None)
def test_setting_last_played_book_to_none_updates_in_settings_object_and_database(peewee_database):
    from cozy.model.settings import Settings
    from cozy.db.settings import Settings as SettingsModel

    settings = Settings()
    settings.last_played_book = None

    assert settings.last_played_book is None
    assert SettingsModel.get().last_played_book is None
def peewee_database_storage():
    from cozy.db.storage import Storage
    from cozy.db.settings import Settings
    from cozy.db.storage_blacklist import StorageBlackList

    db_path, models, test_db = prepare_db()
    path_of_test_folder = os.path.dirname(os.path.realpath(__file__)) + '/'

    with open(path_of_test_folder + 'storages.json') as json_file:
        storage_data = json.load(json_file)

    Storage.insert_many(storage_data).execute()

    Settings.create(path="", last_played_book=None)
    StorageBlackList.create(path="/path/to/replace/test1.mp3")
    StorageBlackList.create(path="/path/to/not/replace/test2.mp3")

    print("Provide database...")

    yield test_db

    teardown_db(db_path, models, test_db)
def load_last_book(self):
    if Settings.get().last_played_book:
        self.update_track_ui()
        self.update_ui_time(self.progress_scale)
        cur_m, cur_s = player.get_current_duration_ui()
        self.__set_progress_scale_value(cur_m * 60 + cur_s)

        pos = int(player.get_current_track().position)
        if self._application_settings.replay:
            log.info("Replaying the previous 30 seconds.")
            amount = 30 * 1000000000
            if pos < amount:
                pos = 0
            else:
                pos = pos - amount
        self.__set_progress_scale_value(
            int(pos / 1000000000 / self.ui.speed.get_speed()))
def update_db():
    db = get_sqlite_database()
    # First test for version 1
    try:
        next(c for c in db.get_columns("settings") if c.name == "version")
    except Exception as e:
        if len(db.get_tables()) == 0:
            data_dir = get_data_dir()
            if os.path.exists(os.path.join(data_dir, "cozy.db")):
                os.remove(os.path.join(data_dir, "cozy.db"))
                os.remove(os.path.join(data_dir, "cozy.db-shm"))
                os.remove(os.path.join(data_dir, "cozy.db-wal"))

        __update_db_1(db)

    version = Settings.get().version

    # then for version 2 and so on
    if version < 2:
        __update_db_2(db)

    if version < 3:
        __update_db_3(db)

    if version < 4:
        __update_db_4(db)

    if version < 5:
        __update_db_5(db)

    if version < 6:
        __update_db_6(db)

    if version < 7:
        __update_db_7(db)

    if version < 8:
        __update_db_8(db)
def _update_db_9(db):
    log.info("Migrating to DB Version 9...")

    models = generate_models(db)
    migrator: SqliteMigrator = SqliteMigrator(db)

    db.create_tables([File, TrackToFile])
    db.stop()
    db.start()

    files: List[File] = []
    track_to_files: List[TrackToFile] = []
    file_id = 1

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Generating File and TrackToFile objects...")
        for track in models["track"].select():
            path = track.file

            file = next((f for f in files if f.path == path), None)

            if File.select().where(File.path == path).count() > 0:
                log.info("Path already existing in db: {}".format(path))
                file = File.select().where(File.path == path).get()
            elif not file:
                file = File(path=path, modified=track.modified, id=file_id)
                files.append(file)
                file_id += 1

            if TrackToFile.select().join(Track).where(
                    TrackToFile.track.id == track.id).count() > 0:
                log.info("TrackToFile already existing in db: {}".format(path))
                continue

            track_to_file = TrackToFile(track=track, file=file, start_at=0)
            track_to_files.append(track_to_file)

        log.info("Inserting File and TrackToFile objects...")
        File.bulk_create(files, batch_size=300)
        TrackToFile.bulk_create(track_to_files, batch_size=300)

    field = ForeignKeyField(File, null=True, field=File.id)

    if "cached_file" not in models["offlinecache"]._meta.sorted_field_names:
        log.info("Rename in OfflineCache: file to cached_file...")
        migrate(
            migrator.rename_column("offlinecache", "file", "cached_file"),
        )

    if "original_file" not in models["offlinecache"]._meta.sorted_field_names:
        log.info("Add in OfflineCache: original_file_id...")
        migrate(
            migrator.add_column("offlinecache", "original_file_id", field)
        )

    db.stop()
    db.start()

    models = generate_models(db)

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Migrating OfflineCache...")
        for cache in models["offlinecache"].select():
            file_query = File.select().where(File.path == cache.track.file)
            # Drop cache entries whose file no longer exists, otherwise get() below would fail
            if file_query.count() < 1:
                cache.delete_instance()
                continue

            file = file_query.get()
            cache.original_file = file
            cache.save(only=cache.dirty_fields)

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Drop in Track: file...")
        migrate(migrator.drop_column("track", "file"))

    if "modified" in models["track"]._meta.sorted_field_names:
        log.info("Drop in Track: modified...")
        migrate(migrator.drop_column("track", "modified"))

    if "track_id" in models["offlinecache"]._meta.sorted_field_names:
        log.info("Drop in OfflineCache: track_id...")
        migrate(migrator.drop_column("offlinecache", "track_id"))

    migrate(migrator.add_not_null("offlinecache", "original_file_id"))

    db.stop()
    db.start()

    log.info("Reset modified on all m4b files")
    File.update(modified=0).where(fn.Lower(
        File.path).endswith("m4b")).execute()

    db.stop()
    db.start()

    Settings.update(version=9).execute()
def update_db():
    db = get_sqlite_database()
    # First test for version 1
    try:
        next(c for c in db.get_columns("settings") if c.name == "version")
    except Exception as e:
        if len(db.get_tables()) == 0:
            data_dir = get_data_dir()
            if os.path.exists(os.path.join(data_dir, "cozy.db")):
                os.remove(os.path.join(data_dir, "cozy.db"))
                os.remove(os.path.join(data_dir, "cozy.db-shm"))
                os.remove(os.path.join(data_dir, "cozy.db-wal"))

        __update_db_1(db)

    version = Settings.get().version

    # then for version 2 and so on
    if version < 2:
        __update_db_2(db)

    if version < 3:
        __update_db_3(db)

    if version < 4:
        __update_db_4(db)

    if version < 5:
        __update_db_5(db)

    if version < 6:
        __update_db_6(db)

    if version < 7:
        __update_db_7(db)

    if version < 8:
        __update_db_8(db)

    if version < 9:
        backup_dir_name = _backup_db(db)
        try:
            _update_db_9(db)
        except Exception as e:
            log.error(e)
            reporter.exception("db_updator", e)
            db.stop()
            _restore_db(backup_dir_name)

            from cozy.ui.db_migration_failed_view import DBMigrationFailedView
            dialog = DBMigrationFailedView()
            dialog.show()
            exit(1)

    if version < 10:
        backup_dir_name = _backup_db(db)
        try:
            _update_db_10(db)
        except Exception as e:
            log.error(e)
            reporter.exception("db_updator", e)
            db.stop()
            _restore_db(backup_dir_name)

            from cozy.ui.db_migration_failed_view import DBMigrationFailedView
            dialog = DBMigrationFailedView()
            dialog.show()
            exit(1)
def __init__(self):
    with self._db:
        self._db_object: SettingsModel = SettingsModel.get()
def __update_db_7(db):
    import cozy.control.artwork_cache as artwork_cache
    artwork_cache.delete_artwork_cache()
    Settings.update(version=7).execute()
def __update_db_5(db):
    db.create_tables([StorageBlackList])
    Settings.update(version=5).execute()
def __update_db_7(db):
    from cozy.control.artwork_cache import ArtworkCache
    artwork_cache = ArtworkCache()
    artwork_cache.delete_artwork_cache()
    Settings.update(version=7).execute()