Example no. 1
 def _del_tag(self, tag: str) -> None:
     '''Delete tag from all tracks in the album.'''
     logger.debug("Deleting %s in all tracks in %s.", tag, self.track_set_key_string())
     for t in self.RGTracks.values():
         try:
             del t.track[tag]
         except KeyError: pass
Example no. 2
    def cleanup_tags(self) -> None:
        '''Delete any ReplayGain tags from track.

        This discards any unsaved changes, then modifies and saves the
        track's tags on disk, and finally reloads the new tags from
        disk.

        '''
        tags_to_clean = set(rg_tags) # type: Set[str]
        tags_to_clean.update('QuodLibet::' + tag for tag in rg_tags)
        tags_to_clean.update('TXXX:' + tag for tag in rg_tags)
        tags_to_clean.update(['RVA2:track', 'RVA2:album'])
        tags_to_clean = { tag.lower() for tag in tags_to_clean }
        # Need a non-easy interface for proper ID3 cleanup
        t = MusicFile(self.filename, easy=False)
        tags_to_delete = []
        for k in t.keys():
            if k.lower() in tags_to_clean:
                tags_to_delete.append(k)
        for k in tags_to_delete:
            logger.debug("Deleting tag: %s", repr(k))
            del t[k]
        t.save()
        # Re-init to pick up tag changes
        new_track = type(self.track)(self.filename)
        self.track = new_track
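To make the tag-name expansion above concrete, here is a minimal, runnable sketch that applies the same steps to a hypothetical two-entry rg_tags; the project's real rg_tags may contain more names.

# Hypothetical rg_tags, for illustration only; the real list may be longer.
rg_tags = ['replaygain_track_gain', 'replaygain_album_gain']
tags_to_clean = set(rg_tags)
tags_to_clean.update('QuodLibet::' + tag for tag in rg_tags)
tags_to_clean.update('TXXX:' + tag for tag in rg_tags)
tags_to_clean.update(['RVA2:track', 'RVA2:album'])
tags_to_clean = {tag.lower() for tag in tags_to_clean}
# After lowercasing, the set contains:
# {'replaygain_track_gain', 'replaygain_album_gain',
#  'quodlibet::replaygain_track_gain', 'quodlibet::replaygain_album_gain',
#  'txxx:replaygain_track_gain', 'txxx:replaygain_album_gain',
#  'rva2:track', 'rva2:album'}
print(sorted(tags_to_clean))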
Example no. 3
    def compute_gain(self, fnames: Iterable[str], album: bool = True) -> Dict[str, Dict[str, float]]:
        fnames = list(fnames)
        basenames_to_fnames = { os.path.basename(f): f for f in fnames }
        if len(basenames_to_fnames) != len(fnames):
            raise ValueError("The bs1770gain backend cannot handle multiple files with the same basename.")
        cmd = [bs1770gain_path, '--replaygain', '--integrated', '--samplepeak', '--xml', ] + fnames
        logger.debug("Running command: %s", repr(cmd))
        p = Popen(cmd, stdout=PIPE)
        xml_text = p.communicate()[0].decode(sys.getdefaultencoding())
        if p.wait() != 0:
            raise CalledProcessError(p.returncode, p.args) # type: ignore

        tree = etree.fromstring(xml_text).xpath(".")[0] # type: ignore # https://github.com/python/typeshed/issues/525
        ainfo = tree.xpath("/bs1770gain/album/summary")[0]
        album_gain = float(ainfo.xpath("./integrated/@lu")[0])
        album_peak = float(ainfo.xpath("./sample-peak/@factor")[0])
        tracks = tree.xpath("/bs1770gain/album/track")
        rginfo = {}
        for tinfo in tracks:
            track_name = tinfo.xpath("./@file")[0]
            track_gain = float(tinfo.xpath("./integrated/@lu")[0])
            track_peak = float(tinfo.xpath("./sample-peak/@factor")[0])
            rginfo[basenames_to_fnames[track_name]] = {
                "replaygain_track_gain": track_gain,
                "replaygain_track_peak": track_peak,
                "replaygain_album_gain": album_gain,
                "replaygain_album_peak": album_peak,
            }
        return rginfo
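A hedged usage sketch for compute_gain: 'backend' below stands for an instance of the class defining the method above, and the file paths are hypothetical. It only illustrates the shape of the returned dictionary, whose keys match the tags built in the loop above.

# Sketch only: 'backend' and the paths are stand-ins, not real project objects.
gains = backend.compute_gain([
    '/music/album/01 - Intro.flac',
    '/music/album/02 - Song.flac',
])
for fname, info in gains.items():
    # Each entry carries both per-track and album-wide figures.
    print(fname,
          info['replaygain_track_gain'], info['replaygain_track_peak'],
          info['replaygain_album_gain'], info['replaygain_album_peak'])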
Example no. 4
 def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
     p = Process(target=handler, args=(track_set,))
     try:
         p.start()
         p.join()
         if p.exitcode != 0:
             logger.error("Subprocess exited with code %s for %s", p.exitcode, track_set.track_set_key_string())
     finally:
         if p.is_alive():
             logger.debug("Killing subprocess")
             p.terminate()
     return track_set
Example no. 5
 def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
     p = Process(target=handler, args=(track_set,)) # type: ignore # https://github.com/python/mypy/issues/797
     try:
         p.start()
         p.join()
         if p.exitcode != 0:  # type: ignore
             logger.error("Subprocess exited with code %s for %s", p.exitcode, track_set.track_set_key_string())  # type: ignore
     finally:
         if p.is_alive():
             logger.debug("Killing subprocess")
             p.terminate()
     return track_set
Example no. 6
 def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
     p = Process(target=handler, args=(
         track_set,
     ))  # type: ignore # https://github.com/python/mypy/issues/797
     try:
         p.start()
         p.join()
         if p.exitcode != 0:  # type: ignore
             logger.error("Subprocess exited with code %s for %s",
                          p.exitcode,
                          track_set.track_set_key_string())  # type: ignore
     finally:
         if p.is_alive():
             logger.debug("Killing subprocess")
             p.terminate()
     return track_set
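The three wrapped_handler variants above differ only in line wrapping and type: ignore comments; the control flow is the same. Below is a self-contained sketch of that pattern with a stand-in handler (noisy_handler and wrapped are hypothetical names), showing why the child is joined first and terminated only if it is somehow still alive afterwards.

# Self-contained sketch of the Process-per-call pattern; names are stand-ins.
from multiprocessing import Process

def noisy_handler(item: str) -> None:
    print("analyzing", item)

def wrapped(item: str) -> str:
    p = Process(target=noisy_handler, args=(item,))
    try:
        p.start()
        p.join()                      # normally the child has already exited here
        if p.exitcode != 0:
            print("subprocess failed for", item)
    finally:
        if p.is_alive():              # only reached if join() was interrupted
            p.terminate()
    return item

if __name__ == '__main__':
    wrapped("some-album")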
Example no. 7
    def compute_gain(self,
                     fnames: Iterable[str],
                     album: bool = True) -> Dict[str, Dict[str, float]]:
        fnames = list(fnames)
        basenames_to_fnames = {os.path.basename(f): f for f in fnames}
        if len(basenames_to_fnames) != len(fnames):
            raise ValueError(
                "The bs1770gain backend cannot handle multiple files with the same basename."
            )
        cmd = [
            bs1770gain_path,
            '--replaygain',
            '--integrated',
            '--samplepeak',
            '--xml',
        ] + fnames
        logger.debug("Running command: %s", repr(cmd))
        p = Popen(cmd, stdout=PIPE)
        xml_text = p.communicate()[0].decode(sys.getdefaultencoding())
        if p.wait() != 0:
            raise CalledProcessError(p.returncode, p.args)  # type: ignore

        tree = etree.fromstring(xml_text).xpath(".")[
            0]  # type: ignore # https://github.com/python/typeshed/issues/525
        ainfo = tree.xpath("/bs1770gain/album/summary")[0]
        album_gain = float(ainfo.xpath("./integrated/@lu")[0])
        album_peak = float(ainfo.xpath("./sample-peak/@factor")[0])
        tracks = tree.xpath("/bs1770gain/album/track")
        rginfo = {}
        for tinfo in tracks:
            track_name = tinfo.xpath("./@file")[0]
            track_gain = float(tinfo.xpath("./integrated/@lu")[0])
            track_peak = float(tinfo.xpath("./sample-peak/@factor")[0])
            rginfo[basenames_to_fnames[track_name]] = {
                "replaygain_track_gain": track_gain,
                "replaygain_track_peak": track_peak,
                "replaygain_album_gain": album_gain,
                "replaygain_album_peak": album_peak,
            }
        return rginfo
Example no. 8
def get_all_music_files(paths: Iterable[str], ignore_hidden: bool = True) -> Iterable[MusicFileType]:
    '''Recursively search in one or more paths for music files.

    By default, hidden files and directories are ignored.

    '''
    paths = map(fullpath, paths)
    for p in remove_redundant_paths(paths):
        if os.path.isdir(p):
            files = []          # type: Iterable[str]
            for root, dirs, files in os.walk(p, followlinks=True):
                logger.debug("Searching for music files in %s", repr(root))
                if ignore_hidden:
                    # Modify dirs in place to cut off os.walk
                    dirs[:] = list(remove_hidden_paths(dirs))
                    files = remove_hidden_paths(files)
                files = filter(lambda f: is_music_file(os.path.join(root, f)), files)
                for f in files:
                    yield MusicFile(os.path.join(root, f), easy=True)
        else:
            logger.debug("Checking for music files at %s", repr(p))
            f = MusicFile(p, easy=True)
            if f is not None:
                yield f
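A short usage sketch, assuming a hypothetical library path; get_all_music_files is a generator, so files are discovered lazily, and the mutagen objects it yields are assumed to expose their path via the usual .filename attribute.

# Sketch only: the path is hypothetical.
for mf in get_all_music_files(['/music/library'], ignore_hidden=True):
    print(mf.filename)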
Example no. 9
def is_music_file(file: str) -> bool:
    # Exists?
    if not os.path.exists(file):
        logger.debug("File %s does not exist", repr(file))
        return False
    if not os.path.getsize(file) > 0:
        logger.debug("File %s has zero size", repr(file))
        return False
    # Readable by Mutagen?
    try:
        if not MusicFile(file):
            logger.debug("File %s is not recognized by Mutagen", repr(file))
            return False
    except Exception:
        logger.debug("File %s is not recognized", repr(file))
        return False
    # OK!
    return True
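For example, is_music_file works as a plain filter over a directory listing; the directory below is hypothetical.

import os

incoming = '/music/incoming'  # hypothetical directory
music = [os.path.join(incoming, name)
         for name in os.listdir(incoming)
         if is_music_file(os.path.join(incoming, name))]
print(music)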
Example no. 10
def register_backend(name: str, obj: GainComputer) -> None:
    '''Backend modules should call this to register a GainComputer object.'''
    if not isinstance(obj, GainComputer):
        raise TypeError("Backend must be a GainComputer instance.")
    logger.debug("Registering backend %s: %s", name, repr(obj))
    backends[name] = obj
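A minimal sketch of how a backend module might register itself, assuming GainComputer can be subclassed with a compute_gain method shaped like the one shown earlier; the 'null' name and NullGainComputer class are purely illustrative, and the real base class may require more than this.

# Illustrative only: NullGainComputer and the 'null' name are made up.
from typing import Dict, Iterable

class NullGainComputer(GainComputer):
    def compute_gain(self, fnames: Iterable[str],
                     album: bool = True) -> Dict[str, Dict[str, float]]:
        # Report 0.0 for every field instead of analyzing any audio.
        fields = ("replaygain_track_gain", "replaygain_track_peak",
                  "replaygain_album_gain", "replaygain_album_peak")
        return {fname: {field: 0.0 for field in fields} for fname in fnames}

register_backend('null', NullGainComputer())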
Example no. 11
 def fset(self, value) -> None:
     logger.debug("Setting %s to %s for %s" % (tag, value, self.filename))
     if value is None:
         del self.album_peak
     else:
         self.track[tag] = format_peak(value)
Example no. 12
 def _set_tag(self, tag: str, value: Any) -> None:
     '''Set tag to value in all tracks in the album.'''
     logger.debug("Setting %s to %s in all tracks in %s.", tag, value, self.track_set_key_string())
     for t in self.RGTracks.values():
         t.track[tag] = str(value)
Example no. 13
 def fset(self, value: Optional[float]) -> None:
     logger.debug("Setting %s to %s for %s", tag, value, self.filename)
     if value is None:
         del self.gain
     else:
         self.track[tag] = format_gain(value)
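The fset closures above (for peak and gain) are one leg of a property; here is a hedged sketch of how such a closure is typically paired with fget and fdel. FakeTrack, make_gain_property, and the inline formatting are illustrative stand-ins, not the project's actual definitions.

# Illustrative property built from fget/fset/fdel closures over a tag dict.
class FakeTrack:
    def __init__(self) -> None:
        self.track = {}  # stands in for the mutagen tag mapping

def make_gain_property(tag: str) -> property:
    def fget(self):
        return self.track.get(tag)

    def fset(self, value) -> None:
        if value is None:
            fdel(self)                           # mirrors 'del self.gain'
        else:
            self.track[tag] = "%.2f dB" % value  # stand-in for format_gain

    def fdel(self) -> None:
        self.track.pop(tag, None)

    return property(fget, fset, fdel)

FakeTrack.gain = make_gain_property("replaygain_track_gain")

t = FakeTrack()
t.gain = -6.5     # routed through fset
print(t.track)    # {'replaygain_track_gain': '-6.50 dB'}
t.gain = None     # fset delegates to fdel, deleting the tag
print(t.track)    # {}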
Example no. 14
def main(force_reanalyze: bool = False,
         include_hidden: bool = False,
         dry_run: bool = False,
         gain_type: str = 'auto',
         backend: str = 'auto',
         jobs: int = default_job_count(),
         low_memory: bool = False,
         quiet: bool = False,
         verbose: bool = False,
         *music_dir: str,
         ) -> None:
    '''Add replaygain tags to your music files.'''

    try:
        from tqdm import tqdm
    except ImportError:
        # Fallback: No progress bars
        tqdm = tqdm_fake
    if quiet:
        logger.setLevel(logging.WARN)
        tqdm = tqdm_fake
    elif verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if backend == 'auto':
        backend_exceptions: List[BackendUnavailableException] = []
        for bname in known_backends:
            try:
                gain_backend = get_backend(bname)
                logger.info('Selected the {} backend to compute ReplayGain'.format(bname))
                break
            except BackendUnavailableException as ex:
                backend_exceptions.append(ex)
        else:
            for exc in backend_exceptions:
                logger.error(exc.args[0])
            logger.error('Could not find any usable backends. Perhaps you have not installed the prerequisites?')
            sys.exit(1)
    else:
        try:
            gain_backend = get_backend(backend)
            logger.info('Using the {} backend to compute ReplayGain'.format(backend))
        except BackendUnavailableException as ex:
            logger.error(ex.args[0])
            sys.exit(1)

    track_constructor = RGTrack
    if dry_run:
        logger.warn('This script is running in "dry run" mode, so no files will actually be modified.')
        track_constructor = RGTrackDryRun
    if len(music_dir) == 0:
        logger.error("You did not specify any music directories or files. Exiting.")
        sys.exit(1)
    music_directories = list(unique(map(fullpath, music_dir)))
    logger.info("Searching for music files in the following locations:\n%s", "\n".join(music_directories),)
    all_music_files = get_all_music_files(music_directories,
                                          ignore_hidden=(not include_hidden))
    if low_memory:
        tracks = map(track_constructor, all_music_files)
        track_sets = RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend)
    else:
        tracks = map(track_constructor, tqdm(all_music_files, desc="Searching"))
        track_sets = list(RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend))
        if len(track_sets) == 0:
            logger.error("Failed to find any tracks in the directories you specified. Exiting.")
            sys.exit(1)
        if (jobs > len(track_sets)):
            jobs = len(track_sets)

    logger.info("Beginning analysis")

    handler = TrackSetHandler(force=force_reanalyze, gain_type=gain_type, dry_run=dry_run, verbose=verbose)
    # Wrapper that runs the handler in a subprocess, allowing for
    # parallel operation
    def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
        p = Process(target=handler, args=(track_set,))
        try:
            p.start()
            p.join()
            if p.exitcode != 0:
                logger.error("Subprocess exited with code %s for %s", p.exitcode, track_set.track_set_key_string())
        finally:
            if p.is_alive():
                logger.debug("Killing subprocess")
                p.terminate()
        return track_set

    pool = None
    try:
        if jobs <= 1:
            # Sequential
            handled_track_sets = map(handler, track_sets)
        else:
            # Parallel (Using process pool doesn't work, so instead we
            # use Process instance within each thread)
            pool = ThreadPool(jobs)
            handled_track_sets = pool.imap_unordered(wrapped_handler, track_sets)
        # Wait for completion
        iter_len = None if low_memory else len(cast(Sized, track_sets))
        for ts in tqdm(handled_track_sets, total=iter_len, desc="Analyzing"):
            pass
        logger.info("Analysis complete.")
    except KeyboardInterrupt:
        if pool is not None:
            logger.debug("Terminating process pool")
            pool.terminate()
            pool = None
        raise
    finally:
        if pool is not None:
            logger.debug("Closing transcode process pool")
            pool.close()
    if dry_run:
        logger.warn('This script ran in "dry run" mode, so no files were actually modified.')
    pass
Example no. 15
def main(force_reanalyze: bool = False,
         include_hidden: bool = False,
         dry_run: bool = False,
         gain_type: str = 'auto',
         backend: str = 'auto',
         jobs: int = default_job_count(),
         low_memory: bool = False,
         quiet: bool = False,
         verbose: bool = False,
         *music_dir: str):
    '''Add replaygain tags to your music files.'''

    try:
        from tqdm import tqdm
    except ImportError:
        # Fallback: No progress bars
        tqdm = tqdm_fake
    if quiet:
        logger.setLevel(logging.WARN)
        tqdm = tqdm_fake
    elif verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if backend == 'auto':
        for bname in known_backends:
            try:
                gain_backend = get_backend(bname)
                logger.info("Selected the %s backend to compute ReplayGain",
                            bname)
                break
            except BackendUnavailableException:
                pass
        else:
            raise BackendUnavailableException(
                "Could not find any usable backends.")
    else:
        gain_backend = get_backend(backend)
        logger.info("Using the %s backend to compute ReplayGain", backend)

    track_constructor = RGTrack
    if dry_run:
        logger.warn(
            'This script is running in "dry run" mode, so no files will actually be modified.'
        )
        track_constructor = RGTrackDryRun
    if len(music_dir) == 0:
        logger.error(
            "You did not specify any music directories or files. Exiting.")
        sys.exit(1)
    music_directories = list(unique(map(fullpath, music_dir)))
    logger.info(
        "Searching for music files in the following locations:\n%s",
        "\n".join(music_directories),
    )
    all_music_files = get_all_music_files(music_directories,
                                          ignore_hidden=(not include_hidden))
    if low_memory:
        tracks = map(track_constructor, all_music_files)
        track_sets = RGTrackSet.MakeTrackSets(tracks,
                                              gain_backend=gain_backend)
    else:
        tracks = map(track_constructor, tqdm(all_music_files,
                                             desc="Searching"))
        track_sets = list(
            RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend))
        if len(track_sets) == 0:
            logger.error(
                "Failed to find any tracks in the directories you specified. Exiting."
            )
            sys.exit(1)
        if (jobs > len(track_sets)):
            jobs = len(track_sets)

    logger.info("Beginning analysis")

    handler = TrackSetHandler(force=force_reanalyze,
                              gain_type=gain_type,
                              dry_run=dry_run,
                              verbose=verbose)

    # Wrapper that runs the handler in a subprocess, allowing for
    # parallel operation
    def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
        p = Process(target=handler, args=(
            track_set,
        ))  # type: ignore # https://github.com/python/mypy/issues/797
        try:
            p.start()
            p.join()
            if p.exitcode != 0:  # type: ignore
                logger.error("Subprocess exited with code %s for %s",
                             p.exitcode,
                             track_set.track_set_key_string())  # type: ignore
        finally:
            if p.is_alive():
                logger.debug("Killing subprocess")
                p.terminate()
        return track_set

    pool = None
    try:
        if jobs <= 1:
            # Sequential
            handled_track_sets = map(
                handler, track_sets
            )  # type: ignore # https://github.com/python/mypy/issues/797
        else:
            # Parallel (Using process pool doesn't work, so instead we
            # use Process instance within each thread)
            pool = ThreadPool(jobs)
            handled_track_sets = pool.imap_unordered(
                wrapped_handler, track_sets
            )  # type: ignore # https://github.com/python/typeshed/issues/683
        # Wait for completion
        iter_len = None if low_memory else len(cast(Sized, track_sets))
        for ts in tqdm(handled_track_sets, total=iter_len, desc="Analyzing"):
            pass
        logger.info("Analysis complete.")
    except KeyboardInterrupt:
        if pool is not None:
            logger.debug("Terminating process pool")
            pool.terminate()
            pool = None
        raise
    finally:
        if pool is not None:
            logger.debug("Closing transcode process pool")
            pool.close()
    if dry_run:
        logger.warn(
            'This script ran in "dry run" mode, so no files were actually modified.'
        )
    pass