Example #1
    def report(self) -> None:
        '''Report calculated replay gain tags.'''
        for k in self.filenames:
            track = self.RGTracks[k]
            logger.info("Set track gain tags for %s:\n\tTrack Gain: %s\n\tTrack Peak: %s", track.filename, track.gain, track.peak)
        if self.want_album_gain():
            logger.info("Set album gain tags for %s:\n\tAlbum Gain: %s\n\tAlbum Peak: %s", self.track_set_key_string(), self.gain, self.peak)
        else:
            logger.info("Did not set album gain tags for %s.", self.track_set_key_string())
Example #2
    def do_gain(self, force: bool = False, gain_type: Union[None, str] = None,
                dry_run: bool = False, verbose: bool = False) -> None:
        '''Analyze all tracks in the album, and add replay gain tags
        to the tracks based on the analysis.

        If force is False (the default) and the album already has
        replay gain tags, then do nothing.

        gain_type can be one of "album", "track", or "auto", as
        described in the help. If provided to this method, it will set
        the object's gain_type field.
        '''
        if gain_type is not None:
            self.gain_type = gain_type
        # This performs some additional checks
        gain_type = "album" if self.want_album_gain() else "track"
        assert gain_type in ("album", "track")
        if self.has_valid_rgdata():
            if force:
                logger.info("Forcing reanalysis of previously-analyzed track set %s", repr(self.track_set_key_string()))
            else:
                logger.info("Skipping previously-analyzed track set %s", repr(self.track_set_key_string()))
                return
        else:
            logger.info('Analyzing track set %s', repr(self.track_set_key_string()))
        rginfo = self.gain_backend.compute_gain(self.filenames)
        # Save track gains
        for fname in self.RGTracks.keys():
            track = self.RGTracks[fname]
            track_rginfo = rginfo[fname]
            (track.gain, track.peak) = (track_rginfo["replaygain_track_gain"], track_rginfo["replaygain_track_peak"]) # type: ignore
        # Set or unset album gain
        if gain_type == "album":
            album_rginfo = next(iter(rginfo.values()))
            (self.gain, self.peak) = (album_rginfo["replaygain_album_gain"], album_rginfo["replaygain_album_peak"]) # type: ignore
        else:
            del self.gain
            del self.peak
        # Now save the tags to the files
        self.save()
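do_gain() skips the work when has_valid_rgdata() reports existing tags and force is False; otherwise it asks the gain backend to analyze every file, copies each track's gain and peak onto the corresponding RGTrack, sets or clears the album-level values, and finally calls save(). A minimal sketch of calling it on a track set built as in the previous sketch (the keyword values are just the options the docstring names):

track_set.do_gain(force=True, gain_type="auto")  # gain_type may be "album", "track" or "auto"
track_set.report()                               # as in Example #1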
Example #3
def main(force_reanalyze: bool = False,
         include_hidden: bool = False,
         dry_run: bool = False,
         gain_type: str = 'auto',
         backend: str = 'auto',
         jobs: int = default_job_count(),
         low_memory: bool = False,
         quiet: bool = False,
         verbose: bool = False,
         *music_dir: str,
         ) -> None:
    '''Add replaygain tags to your music files.'''

    try:
        from tqdm import tqdm
    except ImportError:
        # Fallback: No progress bars
        tqdm = tqdm_fake
    if quiet:
        logger.setLevel(logging.WARN)
        tqdm = tqdm_fake
    elif verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if backend == 'auto':
        backend_exceptions: List[BackendUnavailableException] = []
        for bname in known_backends:
            try:
                gain_backend = get_backend(bname)
                logger.info('Selected the {} backend to compute ReplayGain'.format(bname))
                break
            except BackendUnavailableException as ex:
                backend_exceptions.append(ex)
        else:
            for exc in backend_exceptions:
                logger.error(exc.args[0])
            logger.error('Could not find any usable backends. Perhaps you have not installed the prerequisites?')
            sys.exit(1)
    else:
        try:
            gain_backend = get_backend(backend)
            logger.info('Using the {} backend to compute ReplayGain'.format(backend))
        except BackendUnavailableException as ex:
            logger.error(ex.args[0])
            sys.exit(1)

    track_constructor = RGTrack
    if dry_run:
        logger.warning('This script is running in "dry run" mode, so no files will actually be modified.')
        track_constructor = RGTrackDryRun
    if len(music_dir) == 0:
        logger.error("You did not specify any music directories or files. Exiting.")
        sys.exit(1)
    music_directories = list(unique(map(fullpath, music_dir)))
    logger.info("Searching for music files in the following locations:\n%s", "\n".join(music_directories),)
    all_music_files = get_all_music_files(music_directories,
                                          ignore_hidden=(not include_hidden))
    if low_memory:
        tracks = map(track_constructor, all_music_files)
        track_sets = RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend)
    else:
        tracks = map(track_constructor, tqdm(all_music_files, desc="Searching"))
        track_sets = list(RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend))
        if len(track_sets) == 0:
            logger.error("Failed to find any tracks in the directories you specified. Exiting.")
            sys.exit(1)
        if (jobs > len(track_sets)):
            jobs = len(track_sets)

    logger.info("Beginning analysis")

    handler = TrackSetHandler(force=force_reanalyze, gain_type=gain_type, dry_run=dry_run, verbose=verbose)
    # Wrapper that runs the handler in a subprocess, allowing for
    # parallel operation
    def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
        p = Process(target=handler, args=(track_set,))
        try:
            p.start()
            p.join()
            if p.exitcode != 0:
                logger.error("Subprocess exited with code %s for %s", p.exitcode, track_set.track_set_key_string())
        finally:
            if p.is_alive():
                logger.debug("Killing subprocess")
                p.terminate()
        return track_set

    pool = None
    try:
        if jobs <= 1:
            # Sequential
            handled_track_sets = map(handler, track_sets)
        else:
            # Parallel (Using process pool doesn't work, so instead we
            # use Process instance within each thread)
            pool = ThreadPool(jobs)
            handled_track_sets = pool.imap_unordered(wrapped_handler, track_sets)
        # Wait for completion
        iter_len = None if low_memory else len(cast(Sized, track_sets))
        for ts in tqdm(handled_track_sets, total=iter_len, desc="Analyzing"):
            pass
        logger.info("Analysis complete.")
    except KeyboardInterrupt:
        if pool is not None:
            logger.debug("Terminating process pool")
            pool.terminate()
            pool = None
        raise
    finally:
        if pool is not None:
            logger.debug("Closing transcode process pool")
            pool.close()
    if dry_run:
        logger.warning('This script ran in "dry run" mode, so no files were actually modified.')
    pass
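The parallel branch of main() runs each TrackSetHandler call in its own multiprocessing.Process, but dispatches those processes from a ThreadPool rather than a process Pool, as the in-code comment notes. A stripped-down, standard-library-only sketch of that pattern, with a placeholder worker standing in for the handler:

from multiprocessing import Process
from multiprocessing.pool import ThreadPool

def work(item: int) -> None:
    # Placeholder for handler(track_set): any CPU-heavy analysis goes here.
    print(item * item)

def run_in_subprocess(item: int) -> int:
    # Each ThreadPool worker starts one subprocess and waits for it,
    # mirroring wrapped_handler() above.
    p = Process(target=work, args=(item,))
    p.start()
    p.join()
    if p.exitcode != 0:
        print("subprocess failed for item", item)
    return item

if __name__ == "__main__":
    with ThreadPool(4) as pool:
        for _ in pool.imap_unordered(run_in_subprocess, range(8)):
            pass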
Example #4
def main(force_reanalyze: bool = False,
         include_hidden: bool = False,
         dry_run: bool = False,
         gain_type: str = 'auto',
         backend: str = 'auto',
         jobs: int = default_job_count(),
         low_memory: bool = False,
         quiet: bool = False,
         verbose: bool = False,
         *music_dir: str):
    '''Add replaygain tags to your music files.'''

    try:
        from tqdm import tqdm
    except ImportError:
        # Fallback: No progress bars
        tqdm = tqdm_fake
    if quiet:
        logger.setLevel(logging.WARN)
        tqdm = tqdm_fake
    elif verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if backend == 'auto':
        for bname in known_backends:
            try:
                gain_backend = get_backend(bname)
                logger.info("Selected the %s backend to compute ReplayGain",
                            bname)
                break
            except BackendUnavailableException:
                pass
        else:
            raise BackendUnavailableException(
                "Could not find any usable backends.")
    else:
        gain_backend = get_backend(backend)
        logger.info("Using the %s backend to compute ReplayGain", backend)

    track_constructor = RGTrack
    if dry_run:
        logger.warning(
            'This script is running in "dry run" mode, so no files will actually be modified.'
        )
        track_constructor = RGTrackDryRun
    if len(music_dir) == 0:
        logger.error(
            "You did not specify any music directories or files. Exiting.")
        sys.exit(1)
    music_directories = list(unique(map(fullpath, music_dir)))
    logger.info(
        "Searching for music files in the following locations:\n%s",
        "\n".join(music_directories),
    )
    all_music_files = get_all_music_files(music_directories,
                                          ignore_hidden=(not include_hidden))
    if low_memory:
        tracks = map(track_constructor, all_music_files)
        track_sets = RGTrackSet.MakeTrackSets(tracks,
                                              gain_backend=gain_backend)
    else:
        tracks = map(track_constructor, tqdm(all_music_files,
                                             desc="Searching"))
        track_sets = list(
            RGTrackSet.MakeTrackSets(tracks, gain_backend=gain_backend))
        if len(track_sets) == 0:
            logger.error(
                "Failed to find any tracks in the directories you specified. Exiting."
            )
            sys.exit(1)
        if (jobs > len(track_sets)):
            jobs = len(track_sets)

    logger.info("Beginning analysis")

    handler = TrackSetHandler(force=force_reanalyze,
                              gain_type=gain_type,
                              dry_run=dry_run,
                              verbose=verbose)

    # Wrapper that runs the handler in a subprocess, allowing for
    # parallel operation
    def wrapped_handler(track_set: RGTrackSet) -> RGTrackSet:
        p = Process(target=handler, args=(
            track_set,
        ))  # type: ignore # https://github.com/python/mypy/issues/797
        try:
            p.start()
            p.join()
            if p.exitcode != 0:  # type: ignore
                logger.error("Subprocess exited with code %s for %s",
                             p.exitcode,
                             track_set.track_set_key_string())  # type: ignore
        finally:
            if p.is_alive():
                logger.debug("Killing subprocess")
                p.terminate()
        return track_set

    pool = None
    try:
        if jobs <= 1:
            # Sequential
            handled_track_sets = map(
                handler, track_sets
            )  # type: ignore # https://github.com/python/mypy/issues/797
        else:
            # Parallel (Using process pool doesn't work, so instead we
            # use Process instance within each thread)
            pool = ThreadPool(jobs)
            handled_track_sets = pool.imap_unordered(
                wrapped_handler, track_sets
            )  # type: ignore # https://github.com/python/typeshed/issues/683
        # Wait for completion
        iter_len = None if low_memory else len(cast(Sized, track_sets))
        for ts in tqdm(handled_track_sets, total=iter_len, desc="Analyzing"):
            pass
        logger.info("Analysis complete.")
    except KeyboardInterrupt:
        if pool is not None:
            logger.debug("Terminating process pool")
            pool.terminate()
            pool = None
        raise
    finally:
        if pool is not None:
            logger.debug("Closing transcode process pool")
            pool.close()
    if dry_run:
        logger.warning(
            'This script ran in "dry run" mode, so no files were actually modified.'
        )
    pass
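This variant picks the backend with Python's for/else idiom: the else clause runs only if the loop finishes without a break, i.e. if no backend could be constructed. A tiny self-contained illustration of the idiom (all names here are generic placeholders, not part of the real API):

def pick_first_available(candidates, is_available):
    for name in candidates:
        if is_available(name):
            print("Selected", name)
            break
    else:
        # Reached only when the loop exhausts candidates without break-ing.
        raise RuntimeError("Could not find any usable backends.")
    return name

print(pick_first_available(["gstreamer", "audiotools"],
                           lambda name: name == "audiotools"))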