def test_removing_book_with_missing_file_removes_all_traces_in_db(peewee_database):
    from cozy.model.book import Book
    from cozy.db.book import Book as BookDB
    from cozy.db.track import Track
    from cozy.db.file import File
    from cozy.db.track_to_file import TrackToFile

    book = Book(peewee_database, 1)
    track_ids = [chapter.id for chapter in book.chapters]
    track_to_file_ids = [
        track_to_file.id
        for track_to_file in TrackToFile.select().join(Track).where(TrackToFile.track.id << track_ids)
    ]
    file_ids = [
        track_to_file.file.id
        for track_to_file in TrackToFile.select().join(Track).where(TrackToFile.track.id << track_ids)
    ]

    assert len(track_ids) > 0
    assert len(track_to_file_ids) > 0
    assert len(file_ids) > 0

    File.get_by_id(file_ids[0]).delete_instance()
    book.remove()

    assert BookDB.select().where(BookDB.id == 1).count() == 0
    assert Track.select().where(Track.id << track_ids).count() == 0
    assert TrackToFile.select().where(TrackToFile.id << track_to_file_ids).count() == 0
    assert File.select().where(File.id << file_ids).count() == 0
def insert_many(self, media_files: Set[MediaFile]):
    self._book_update_positions = []

    files = self._prepare_files_db_objects(media_files)
    File.insert_many(files).execute()

    tracks = self._prepare_track_db_objects(media_files)
    self._insert_tracks(tracks)
    self._update_book_positions()
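# Hedged usage sketch for insert_many() above (not part of the project itself): it
# builds a MediaFile the same way the tests below do and hands it to a DatabaseImporter.
# It assumes an initialized Cozy database context, e.g. the peewee_database fixture
# shown further down; the example values are illustrative only.
from cozy.media.chapter import Chapter
from cozy.media.media_file import MediaFile
from cozy.model.database_importer import DatabaseImporter


def import_example():
    chapter = Chapter("Example Chapter", 0, 1234567, 1)
    media_file = MediaFile(book_name="Example Book",
                           author="Example Author",
                           reader="Example Reader",
                           disk=1,
                           cover=b"cover",
                           path="Example Book/01 Example Chapter.m4a",
                           modified=1234567,
                           chapters=[chapter])

    importer = DatabaseImporter()
    importer.insert_many({media_file})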
def _exchange_file(self, file: File):
    old_file_id = self._track_to_file_db_object.file.id
    self._track_to_file_db_object.file = file
    self._track_to_file_db_object.save(only=self._track_to_file_db_object.dirty_fields)

    # Drop the old file row if no other track references it anymore.
    if TrackToFile.select().join(File).where(TrackToFile.file.id == old_file_id).count() == 0:
        File.delete().where(File.id == old_file_id).execute()
def delete(self):
    file_id = self.file_id
    self._db_object.delete_instance(recursive=True)

    # Only delete the file row if no other track still references it.
    if TrackToFile.select().join(File).where(TrackToFile.file.id == file_id).count() == 0:
        File.delete().where(File.id == file_id).execute()

    self.emit_event("chapter-deleted", self)
    self.destroy_listeners()
def peewee_database():
    from cozy.db.track import Track
    from cozy.db.book import Book
    from cozy.db.settings import Settings
    from cozy.db.storage_blacklist import StorageBlackList
    from cozy.db.storage import Storage
    from cozy.db.file import File
    from cozy.db.track_to_file import TrackToFile

    db_path, models, test_db = prepare_db()

    path_of_test_folder = os.path.dirname(os.path.realpath(__file__)) + '/'

    with open(path_of_test_folder + 'books.json') as json_file:
        book_data = json.load(json_file)

    with open(path_of_test_folder + 'tracks.json') as json_file:
        track_data = json.load(json_file)

    with open(path_of_test_folder + 'files.json') as json_file:
        file_data = json.load(json_file)

    with open(path_of_test_folder + 'track_to_file.json') as json_file:
        track_to_file_data = json.load(json_file)

    Book.insert_many(book_data).execute()

    for chunk in chunks(track_data, 25):
        Track.insert_many(chunk).execute()

    for chunk in chunks(file_data, 25):
        File.insert_many(chunk).execute()

    for chunk in chunks(track_to_file_data, 25):
        TrackToFile.insert_many(chunk).execute()

    with open(path_of_test_folder + 'storages.json') as json_file:
        storage_data = json.load(json_file)

    Storage.insert_many(storage_data).execute()

    Settings.create(path="", last_played_book=Book.get())
    StorageBlackList.create(path="/path/to/replace/test1.mp3")
    StorageBlackList.create(path="/path/to/not/replace/test2.mp3")

    print("Provide database...")
    yield test_db

    teardown_db(db_path, models, test_db)
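# prepare_db, teardown_db and chunks are helpers from the test suite and are not shown
# in this excerpt. A minimal sketch of chunks, consistent with how it is used above
# (an assumption, not necessarily the project's exact implementation):
def chunks(data, n):
    # Yield successive n-sized slices of the given list.
    for i in range(0, len(data), n):
        yield data[i:i + n]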
def test_insert_track_inserts_all_rows_expected():
    from cozy.model.database_importer import DatabaseImporter, TrackInsertRequest
    from cozy.db.book import Book
    from cozy.db.file import File
    from cozy.db.track_to_file import TrackToFile

    database_importer = DatabaseImporter()
    file = File.create(path="New File", modified=1234567)
    track_data = {
        "name": "Test",
        "number": 2,
        "disk": 2,
        "book": Book.select().where(Book.name == "Test Book").get(),
        "length": 123,
        "position": 0
    }
    track = TrackInsertRequest(track_data, file, 1234)

    database_importer._insert_tracks([track])

    track_to_file_query = TrackToFile.select().join(File).where(TrackToFile.file == file.id)
    assert track_to_file_query.count() == 1

    track_to_file: TrackToFile = track_to_file_query.get()
    assert track_to_file.track.name == track_data["name"]
    assert track_to_file.track.number == track_data["number"]
    assert track_to_file.track.disk == track_data["disk"]
    assert track_to_file.track.book.id == Book.select().where(Book.name == "Test Book").get().id
    assert track_to_file.track.length == track_data["length"]
    assert track_to_file.track.position == track_data["position"]
def test_delete_all_tracks_from_db_does_as_it_says():
    from cozy.media.media_file import MediaFile
    from cozy.media.chapter import Chapter
    from cozy.db.file import File
    from cozy.db.track import Track
    from cozy.db.track_to_file import TrackToFile
    from cozy.model.database_importer import DatabaseImporter

    database_importer = DatabaseImporter()

    chapter = Chapter("Ohne Aussicht auf Freiheit", 0, 1234567, 999)
    media_file = MediaFile(book_name="Test Book New",
                           author="New Author2",
                           reader="New Reader",
                           disk=999,
                           cover=b"cover",
                           path="20.000 Meilen unter dem Meer/2-10 Ohne Aussicht auf Freiheit.m4a",
                           modified=1234567,
                           chapters=[chapter])

    assert Track.select().where(Track.name == "Ohne Aussicht auf Freiheit").count() == 1
    assert TrackToFile.select().join(File).where(TrackToFile.file.path == media_file.path).count() == 1

    database_importer._delete_tracks_from_db(media_file)

    assert Track.select().where(Track.name == "Ohne Aussicht auf Freiheit").count() == 0
    assert TrackToFile.select().join(File).where(TrackToFile.file.path == media_file.path).count() == 0
    assert File.select().where(File.path == media_file.path).count() == 1
def file(self, new_file: str):
    file_query = File.select().where(File.path == new_file)
    if file_query.count() > 0:
        self._exchange_file(file_query.get())
    else:
        self._create_new_file(new_file)
def test_delete_keeps_file_object_if_used_elsewhere(peewee_database):
    from cozy.db.file import File
    from cozy.model.track import Track

    track = Track(peewee_database, 230)
    file_id = track.file_id

    track.delete()

    assert File.get_or_none(file_id)
def test_update_files_db_objects_updates_modified_field():
    from cozy.model.database_importer import DatabaseImporter
    from cozy.media.media_file import MediaFile
    from cozy.db.file import File

    media_file = MediaFile(book_name="New Book Name",
                           author="New Author",
                           reader="New Reader",
                           disk=999,
                           cover=b"cover",
                           path="test.mp3",
                           modified=12345678,
                           chapters=[None])
    database_importer = DatabaseImporter()
    file = File.select().where(File.path == "test.mp3").get()

    database_importer._update_files_in_db(file, media_file)

    assert File.select().where(File.path == "test.mp3").get().modified == 12345678
def test_setting_file_gets_file_object_if_it_is_already_present_in_database(peewee_database):
    from cozy.db.track_to_file import TrackToFile
    from cozy.db.file import File
    from cozy.model.track import Track

    track = Track(peewee_database, 1)
    track.file = "file with multiple chapters.m4b"
    file = TrackToFile.get(TrackToFile.track == track.id).file

    assert track.file == "file with multiple chapters.m4b"
    assert file.path == "file with multiple chapters.m4b"
    assert File.select().where(File.id == 0).count() == 0
def test_setting_file_gets_file_object_if_it_is_already_present_in_database_but_preserves_old_file_if_still_used(
        peewee_database):
    from cozy.db.track_to_file import TrackToFile
    from cozy.db.file import File
    from cozy.model.track import Track

    track = Track(peewee_database, 230)
    track.file = "Changed path"
    file = TrackToFile.get(TrackToFile.track == track.id).file

    assert track.file == "Changed path"
    assert file.path == "Changed path"
    assert File.select().where(File.id == 229).count() == 1
def test_prepare_db_objects_creates_new_book(mocker):
    from cozy.model.database_importer import DatabaseImporter
    from cozy.media.media_file import MediaFile
    from cozy.media.chapter import Chapter
    from cozy.db.file import File

    database_importer = DatabaseImporter()
    spy = mocker.spy(database_importer, "_create_book_db_object")

    File.create(path="New test File", modified=1234567)

    chapter = Chapter("New Chapter", 0, 1234567, 999)
    media_file = MediaFile(book_name="Test Book New",
                           author="New Author2",
                           reader="New Reader",
                           disk=999,
                           cover=b"cover",
                           path="New test File",
                           modified=1234567,
                           chapters=[chapter])

    res_dict = database_importer._prepare_track_db_objects([media_file])

    assert len(list(res_dict)) == 1
    spy.assert_called_once()
def test_prepare_db_objects_updates_existing_book_regardless_of_spelling(mocker):
    from cozy.model.database_importer import DatabaseImporter
    from cozy.media.media_file import MediaFile
    from cozy.media.chapter import Chapter
    from cozy.db.file import File

    database_importer = DatabaseImporter()
    spy = mocker.spy(database_importer, "_update_book_db_object")

    File.create(path="New test File", modified=1234567)
    File.create(path="Another test File", modified=1234568)

    chapter = Chapter("New Chapter", 0, 1234567, 999)
    another_chapter = Chapter("Another Chapter", 0, 1234567, 999)
    media_file = MediaFile(book_name="TeSt bOOk",
                           author="New Author2",
                           reader="New Reader",
                           disk=999,
                           cover=b"cover",
                           path="New test File",
                           modified=1234567,
                           chapters=[chapter])
    another_media_file = MediaFile(book_name="TEST BOOK",
                                   author="New Author2",
                                   reader="New Reader",
                                   disk=999,
                                   cover=b"cover",
                                   path="Another test File",
                                   modified=1234568,
                                   chapters=[another_chapter])

    res_dict = database_importer._prepare_track_db_objects([media_file, another_media_file])

    assert len(list(res_dict)) == 2
    spy.assert_called_once()
def _prepare_track_db_objects(
        self, media_files: Set[MediaFile]) -> Set[TrackInsertRequest]:
    book_db_objects: Set[BookModel] = set()

    for media_file in media_files:
        if not media_file:
            continue

        book = next((book for book in book_db_objects
                     if is_same_book(book.name, media_file.book_name)), None)

        file_query = File.select().where(File.path == media_file.path)
        if not file_query.exists():
            log.error("No file object with path present: {}".format(media_file.path))
            continue

        file = file_query.get()

        if not book:
            book = self._import_or_update_book(media_file)
            book_db_objects.add(book)

        # Remember the book's current progress so it can be restored after its
        # tracks have been replaced.
        try:
            book_model = Book(self._db, book.id)
            progress = book_model.progress
        except BookIsEmpty:
            progress = 0

        self._delete_tracks_from_db(media_file)
        tracks = self._get_track_list_for_db(media_file, book)

        for track in tracks:
            start_at = track.pop("startAt")
            yield TrackInsertRequest(track, file, start_at)

        update_position_request_present = any(
            b.book_id == book.id for b in self._book_update_positions)

        if progress > 0 and not update_position_request_present:
            self._book_update_positions.append(
                BookUpdatePositionRequest(book.id, progress))
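# The request objects used above are simple value containers whose definitions are not
# part of this excerpt. A sketch of their assumed shape, inferred from how they are
# constructed and read here (field names are assumptions, not the project's exact code):
from dataclasses import dataclass

from cozy.db.file import File


@dataclass
class TrackInsertRequest:
    track: dict    # column values for the new Track row
    file: File     # File row the track is mapped to via TrackToFile
    start_at: int  # offset of the chapter inside the file


@dataclass
class BookUpdatePositionRequest:
    book_id: int
    progress: int  # playback progress to restore after re-import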
def _prepare_files_db_objects(self, media_files: Set[MediaFile]) -> List[object]:
    files = []

    for media_file in media_files:
        query = File.select().where(File.path == media_file.path)
        if query.exists():
            self._update_files_in_db(query.get(), media_file)
            continue

        file_already_in_list = any(f["path"] == media_file.path for f in files)

        if not file_already_in_list:
            files.append({
                "path": media_file.path,
                "modified": media_file.modified
            })

    return files
def _update_db_9(db):
    # Schema version 9: move file paths out of Track into the new File and
    # TrackToFile tables and rewire OfflineCache accordingly.
    log.info("Migrating to DB Version 9...")
    models = generate_models(db)
    migrator: SqliteMigrator = SqliteMigrator(db)

    db.create_tables([File, TrackToFile])
    db.stop()
    db.start()

    files: List[File] = []
    track_to_files: List[TrackToFile] = []
    file_id = 1

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Generating File and TrackToFile objects...")
        for track in models["track"].select():
            path = track.file
            file = next((f for f in files if f.path == path), None)

            if File.select().where(File.path == path).count() > 0:
                log.info("Path already existing in db: {}".format(path))
                file = File.select().where(File.path == path).get()
            elif not file:
                file = File(path=path, modified=track.modified, id=file_id)
                files.append(file)
                file_id += 1

            if TrackToFile.select().join(Track).where(TrackToFile.track.id == track.id).count() > 0:
                log.info("TrackToFile already existing in db: {}".format(path))
                continue

            track_to_file = TrackToFile(track=track, file=file, start_at=0)
            track_to_files.append(track_to_file)

        log.info("Inserting File and TrackToFile objects...")
        File.bulk_create(files, batch_size=300)
        TrackToFile.bulk_create(track_to_files, batch_size=300)

    field = ForeignKeyField(File, null=True, field=File.id)

    if "cached_file" not in models["offlinecache"]._meta.sorted_field_names:
        log.info("Rename in OfflineCache: file to cached_file...")
        migrate(migrator.rename_column("offlinecache", "file", "cached_file"))

    if "original_file" not in models["offlinecache"]._meta.sorted_field_names:
        log.info("Add in OfflineCache: original_file_id...")
        migrate(migrator.add_column("offlinecache", "original_file_id", field))

    db.stop()
    db.start()

    models = generate_models(db)

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Migrating OfflineCache...")
        for cache in models["offlinecache"].select():
            file_query = File.select().where(File.path == cache.track.file)
            # Drop cache entries whose file no longer exists; otherwise link
            # them to the new File row.
            if file_query.count() == 0:
                cache.delete_instance()
                continue

            file = file_query.get()
            cache.original_file = file
            cache.save(only=cache.dirty_fields)

    if "file" in models["track"]._meta.sorted_field_names:
        log.info("Drop in Track: file...")
        migrate(migrator.drop_column("track", "file"))

    if "modified" in models["track"]._meta.sorted_field_names:
        log.info("Drop in Track: modified...")
        migrate(migrator.drop_column("track", "modified"))

    if "track_id" in models["offlinecache"]._meta.sorted_field_names:
        log.info("Drop in OfflineCache: track_id...")
        migrate(migrator.drop_column("offlinecache", "track_id"))

    migrate(migrator.add_not_null("offlinecache", "original_file_id"))

    db.stop()
    db.start()

    log.info("Reset modified on all m4b files")
    File.update(modified=0).where(fn.Lower(File.path).endswith("m4b")).execute()

    db.stop()
    db.start()

    Settings.update(version=9).execute()
def _update_files_in_db(self, file: File, media_file: MediaFile):
    file.modified = media_file.modified
    file.save(only=file.dirty_fields)
def reset_modified_date_for_all():
    File.update(modified=0).execute()