Code example #1
File: replaygain.py  Project: arisskz6/Scripts-1
 async def run(self):
     # We could use the default executor with basically the same performance
     # (because of Python's GIL), but an explicit ThreadPoolExecutor lets us cap the maximum
     # number of workers and thus the maximum number of concurrent subprocesses.
     with ThreadPoolExecutor(max_workers=cores_count()) as executor:
         loop = asyncio.get_event_loop()
         tasks = [
             loop.run_in_executor(executor, self.worker, path)
             for path in self.queue_generator()
         ]
         for result in await asyncio.gather(*tasks):
             pass
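
This snippet leans on names defined elsewhere in the project (`cores_count()`, `self.worker`, `self.queue_generator()`), so it is not runnable on its own. Below is a minimal, self-contained sketch of the same fan-out pattern, assuming `cores_count()` simply wraps `os.cpu_count()` and with the worker and the path generator replaced by hypothetical stand-ins:

import asyncio
import os
from concurrent.futures import ThreadPoolExecutor


def cores_count():
    # Assumed helper: number of CPU cores, falling back to 1 when it is unknown.
    return os.cpu_count() or 1


class Demo:
    def queue_generator(self):
        # Hypothetical stand-in for the real path generator.
        yield from ("a.flac", "b.flac", "c.flac")

    def worker(self, path):
        # Hypothetical stand-in for the real per-file work
        # (e.g. spawning a replaygain subprocess and waiting for it).
        return "processed " + path

    async def run(self):
        with ThreadPoolExecutor(max_workers=cores_count()) as executor:
            loop = asyncio.get_running_loop()  # modern replacement for get_event_loop()
            tasks = [
                loop.run_in_executor(executor, self.worker, path)
                for path in self.queue_generator()
            ]
            return await asyncio.gather(*tasks)


print(asyncio.run(Demo().run()))

Because the worker runs blocking code, run_in_executor keeps the event loop responsive while the thread pool bounds how many files are processed at once.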
Code example #2
File: replaygain.py  Project: mrmichalis/Scripts-1
    def __init__(self, logger, options):
        self.logger = logger
        self.options = options
        self.recursive = options.recursive
        self.paths = options.files
        del options.recursive   # don't want to pass it to ReplayGain object
        del options.files   # don't want to pass it to ReplayGain object

        self.threads = cores_count()

        self.killed = threading.Event()
        self.threadsFinished = 0
        self.queue = LockedIterator(self.queue_generator())
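
`LockedIterator` is not shown in the snippet; the name suggests a generator wrapped in a lock so that several worker threads can pull paths from one shared queue. A hedged sketch of such a wrapper (an assumption about its shape, not the project's actual implementation):

import threading


class LockedIterator:
    # Sketch: serialize access to a shared iterator across worker threads.
    def __init__(self, iterable):
        self._lock = threading.Lock()
        self._iterator = iter(iterable)

    def __iter__(self):
        return self

    def __next__(self):
        # Only one thread may advance the underlying generator at a time.
        with self._lock:
            return next(self._iterator)

With a wrapper like this, each worker thread can simply write `for path in self.queue: ...` without any extra locking at the call site.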
Code example #3
File: mp3convert.py  Project: mrmichalis/Scripts-1
    def __init__(self, args):
        self.countAudioFiles = 0
        self.countHigherBitrate = 0
        self.countDifferentFormat = 0
        self.countErrors = 0
        self.countNonAudioFiles = 0

        self.dry_run = args.dry_run
        self.bitrate = args.bitrate
        self.verbose = args.verbose
        self.recursive = args.recursive
        self.deleteAfter = args.delete_after
        self.outputExtension = "." + args.output_extension
        self.paths = args.path

        self.threads = cores_count()

        self.killed = threading.Event()
        self.threadsFinished = 0
        self.queue = LockedIterator(self.queue_generator())
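
This `__init__` only prepares shared state: per-file counters, a `killed` event for clean shutdown, and a locked queue shared by `self.threads` workers. The fan-out itself happens elsewhere in the script; here is a hedged sketch of how fields like these are commonly consumed (the class name and the methods `run`, `_worker_loop`, and `process` are hypothetical, and the `LockedIterator` sketch above is reused):

import threading


class ConverterSketch:
    def __init__(self, paths, threads):
        self.threads = threads
        self.killed = threading.Event()
        self.queue = LockedIterator(paths)  # reuses the sketch above

    def run(self):
        workers = [threading.Thread(target=self._worker_loop, daemon=True)
                   for _ in range(self.threads)]
        for t in workers:
            t.start()
        for t in workers:
            t.join()

    def _worker_loop(self):
        # Pull paths until the queue is exhausted or a shutdown was requested
        # (e.g. a signal handler calling self.killed.set()).
        for path in self.queue:
            if self.killed.is_set():
                break
            self.process(path)

    def process(self, path):
        # Hypothetical per-file step (probing the bitrate, re-encoding, ...).
        print("converted", path)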