def get_local_metadata(self, path: str) -> "Metadata":
    """Collects Metadata about this song from the local system,
    either from the database if available or the filesystem.
    Used by localdrive and youtube."""
    if not self.id:
        raise ValueError()
    # Prefer the archived database entry; fall back to the file system
    # when there is no entry or its metadata is not cached.
    try:
        archived_song = ArchivedSong.objects.get(url=self.get_external_url())
    except ArchivedSong.DoesNotExist:
        metadata = None
    else:
        metadata = archived_song.get_metadata()
        if not metadata["cached"]:
            metadata = None
    if metadata is None:
        # Read straight from the file and attach this provider's URLs.
        metadata = song_utils.get_metadata(path)
        metadata["internal_url"] = self.get_internal_url()
        metadata["external_url"] = self.get_external_url()
        metadata["stream_url"] = None
    if not metadata["title"]:
        # Without a title the external URL doubles as a display name.
        if not metadata["external_url"]:
            raise ValueError
        metadata["title"] = metadata["external_url"]
    return metadata
def handle(self, *args, **options):
    """Synchronize the metadata of all archived songs with the file system.

    For every song whose provider is available and whose file is cached,
    artist, title and duration are refreshed from the file; otherwise the
    song is merely marked as not cached. Existing data is never deleted.
    """
    from core.models import ArchivedSong
    from core.musiq.song_provider import SongProvider

    # Imported at function scope (not module level) to avoid import
    # cycles, but hoisted out of the loop so they run only once
    # instead of on every cached song.
    from core.musiq.local import LocalSongProvider
    from core.musiq.youtube import YoutubeSongProvider

    for song in ArchivedSong.objects.all():
        try:
            provider = SongProvider.create(external_url=song.url)
        except NotImplementedError:
            # For this song a provider is necessary that is not available
            # e.g. the song was played before, but the provider was disabled
            continue
        cached = provider.check_cached()
        if cached:
            # sync the metadata in the database with the file system.
            # get_path is defined for localdrive and youtube,
            # the only two providers that may be cached
            assert isinstance(provider, (YoutubeSongProvider, LocalSongProvider))
            metadata = song_utils.get_metadata(provider.get_path())
            song.artist = metadata["artist"]
            song.title = metadata["title"]
            song.duration = metadata["duration"]
            song.cached = True
        else:
            # keep old data but store that the song is not cached
            song.cached = False
        song.save()
def get_metadata(self) -> "Metadata":
    """Read this song's metadata from its file and attach the provider URLs."""
    result = song_utils.get_metadata(self._get_path())
    result["internal_url"] = self.get_internal_url()
    result["external_url"] = self.get_external_url()
    # A file without a title tag falls back to its external URL.
    result["title"] = result["title"] or result["external_url"]
    return result
def get_metadata(self) -> "Metadata":
    """Read this song's metadata from its cached file and attach the
    provider URLs.

    Styled consistently with the other ``get_metadata`` implementations
    (return annotation, double quotes); behavior is unchanged.
    """
    metadata = song_utils.get_metadata(self.get_path())
    metadata["internal_url"] = self.get_internal_url()
    metadata["external_url"] = self.get_external_url()
    if not metadata["title"]:
        # A file without a title tag falls back to its external URL.
        metadata["title"] = metadata["external_url"]
    return metadata
def get_metadata(self) -> "Metadata":
    """Read metadata from the cached file of this YouTube song."""
    if not self.id:
        raise ValueError()
    result = song_utils.get_metadata(self._get_path())
    result["internal_url"] = self.get_internal_url()
    # The external URL is rebuilt from the video id rather than stored.
    result["external_url"] = "https://www.youtube.com/watch?v=" + self.id
    if not result["title"]:
        # Fall back to the watch URL when the file carries no title tag.
        result["title"] = result["external_url"]
    return result
def get_alarm_metadata() -> "Metadata":
    """Returns a metadata object for the alarm. The duration is dynamically determined."""
    alarm_path = os.path.join(conf.BASE_DIR, "resources/sounds/alarm.m4a")
    # Only the duration is read from the bundled sound file;
    # everything else is fixed.
    alarm_duration = song_utils.get_metadata(alarm_path)["duration"]
    return {
        "artist": "Raveberry",
        "title": "ALARM!",
        "duration": alarm_duration,
        "internal_url": "alarm",
        "external_url": "https://raveberry.party/alarm",
        "stream_url": None,
        "cached": True,
    }
def _scan_files(library_path: str, filecount: int) -> Tuple[int, int]:
    """Walk *library_path* and create an ArchivedSong for every new file.

    Skips the directory raveberry manages itself, reports progress at
    most every UPDATE_FREQUENCY seconds, and returns the tuple
    ``(files_scanned, files_added)``.
    """
    last_update = time.time()
    files_scanned = 0
    files_added = 0
    # Loop-invariant: resolve the cache directory once.
    cache_dir = os.path.abspath(conf.SONGS_CACHE_DIR)
    for dirpath, _, filenames in os.walk(library_path):
        if os.path.abspath(dirpath) == cache_dir:
            # do not add files handled by raveberry as local files
            continue
        now = time.time()
        if now - last_update > UPDATE_FREQUENCY:
            last_update = now
            _set_scan_progress(f"{filecount} / {files_scanned} / {files_added}")
        for filename in filenames:
            files_scanned += 1
            path = os.path.join(dirpath, filename)
            try:
                metadata = song_utils.get_metadata(path)
            except (ValueError, MutagenError):
                # the given file could not be parsed and will not be added to the database
                continue
            # relpath is robust against a trailing slash in library_path,
            # unlike the previous slicing with len(library_path) + 1.
            library_relative_path = os.path.relpath(path, library_path)
            external_url = os.path.join("local_library", library_relative_path)
            if not ArchivedSong.objects.filter(url=external_url).exists():
                files_added += 1
                ArchivedSong.objects.create(
                    url=external_url,
                    artist=metadata["artist"],
                    title=metadata["title"],
                    duration=metadata["duration"],
                    counter=0,
                    cached=metadata["cached"],
                )
    return files_scanned, files_added
def _scan_library(self, library_path: str) -> None:
    """Scan *library_path* for audio files and add them to the database.

    Two passes over the directory tree: the first only counts files so
    the second can report progress as "total / scanned / added" through
    self.scan_progress and self.update_state(). Files inside raveberry's
    own cache directory are skipped in both passes.
    """
    self.scan_progress = '0 / 0 / 0'
    self.update_state()
    scan_start = time.time()
    last_update = scan_start
    # Throttle progress updates to at most one every half second.
    update_frequency = 0.5
    filecount = 0
    # First pass: count files for progress reporting.
    for (dirpath, _, filenames) in os.walk(library_path):
        now = time.time()
        if now - last_update > update_frequency:
            last_update = now
            self.scan_progress = f'{filecount} / 0 / 0'
            self.update_state()
        if os.path.abspath(dirpath) == os.path.abspath(settings.SONGS_CACHE_DIR):
            # do not add files handled by raveberry as local files
            continue
        filecount += len(filenames)
    # (Re)create the symlink through which library files are reachable
    # inside the cache directory.
    library_link = os.path.join(settings.SONGS_CACHE_DIR, 'local_library')
    try:
        os.remove(library_link)
    except FileNotFoundError:
        pass
    os.symlink(library_path, library_link)
    self.base.logger.info(f'started scanning in {library_path}')
    self.scan_progress = f'{filecount} / 0 / 0'
    self.update_state()
    files_scanned = 0
    files_added = 0
    # Second pass: parse each file's metadata and insert missing songs.
    for (dirpath, _, filenames) in os.walk(library_path):
        if os.path.abspath(dirpath) == os.path.abspath(settings.SONGS_CACHE_DIR):
            # do not add files handled by raveberry as local files
            continue
        now = time.time()
        if now - last_update > update_frequency:
            last_update = now
            self.scan_progress = f'{filecount} / {files_scanned} / {files_added}'
            self.update_state()
        for filename in filenames:
            files_scanned += 1
            path = os.path.join(dirpath, filename)
            try:
                metadata = song_utils.get_metadata(path)
            except (ValueError, MutagenError):
                # the given file could not be parsed and will not be added to the database
                pass
            else:
                # Path relative to the library root; assumes library_path
                # has no trailing slash — TODO confirm at the call site.
                library_relative_path = path[len(library_path)+1:]
                external_url = os.path.join('local_library', library_relative_path)
                if not ArchivedSong.objects.filter(url=external_url).exists():
                    files_added += 1
                    ArchivedSong.objects.create(url=external_url,
                        artist=metadata['artist'],
                        title=metadata['title'],
                        counter=0)
    # NOTE(review): both walks must observe the same files; a concurrent
    # change to the library between passes would trip this assert.
    assert files_scanned == filecount
    self.scan_progress = f'{filecount} / {files_scanned} / {files_added}'
    self.update_state()
    self.base.logger.info(f'done scanning in {library_path}')