def separate_into_parts(self, input_path: str, output_path: str):
    """Separate the input track into stem MP3s (drums, bass, other, vocals).

    Runs the separation model, then exports each stem as an MP3 in
    parallel worker processes.

    :param input_path: Path of the audio file to separate
    :param output_path: Directory that receives ``<stem>.mp3`` files
    :raises SoftTimeLimitExceeded: if the task's soft time limit fires;
        the pool is terminated before re-raising
    """
    input_path = Path(input_path)
    output_path = Path(output_path)
    model = self.get_model()
    raw_sources = self.apply_model(model, input_path)

    # Export all source MP3s in parallel
    pool = Pool()
    tasks = []
    for source, name in zip(raw_sources, ['drums', 'bass', 'other', 'vocals']):
        # (channels, samples) layout expected by the audio adapter —
        # presumably; confirm against audio_adapter.save.
        source = source.cpu().transpose(0, 1).numpy()
        filename = f'{name}.mp3'
        print(f'Exporting {name} MP3...')
        task = pool.apply_async(self.audio_adapter.save,
                                (output_path / filename, source,
                                 self.sample_rate, 'mp3', self.bitrate))
        tasks.append(task)

    try:
        pool.close()
        pool.join()
        # Retrieve every result so exceptions raised inside workers
        # propagate; without .get() apply_async failures are silently
        # dropped once the pool is joined.
        for task in tasks:
            task.get()
    except SoftTimeLimitExceeded as e:
        pool.terminate()
        raise e
def separate_into_parts(self, input_path: str, output_path: str):
    """Separate the input track into stem MP3s (drums, bass, other, vocals).

    Downloads/verifies the model, runs separation, then encodes each
    stem to MP3 in parallel worker processes.

    :param input_path: Path of the audio file to separate
    :param output_path: Directory that receives ``<stem>.mp3`` files
    :raises SoftTimeLimitExceeded: if the task's soft time limit fires;
        the pool is terminated before re-raising
    """
    input_path = Path(input_path)
    output_path = Path(output_path)
    self.download_and_verify()
    raw_sources = self.apply_model(input_path)

    # Export all source MP3s in parallel
    pool = Pool()
    tasks = []
    for source, name in zip(raw_sources, ['drums', 'bass', 'other', 'vocals']):
        # Convert float samples to signed 16-bit PCM for MP3 encoding.
        source = (source * 2**15).clamp_(-2**15, 2**15 - 1).short()
        source = source.cpu().transpose(0, 1).numpy()
        filename = f'{name}.mp3'
        print(f'Exporting {name} MP3...')
        task = pool.apply_async(encode_mp3,
                                (source, str(output_path / filename),
                                 self.bitrate, self.verbose))
        tasks.append(task)

    try:
        pool.close()
        pool.join()
        # Retrieve every result so exceptions raised inside workers
        # propagate; without .get() apply_async failures are silently
        # dropped once the pool is joined.
        for task in tasks:
            task.get()
    except SoftTimeLimitExceeded as e:
        pool.terminate()
        raise e
class MultiprocessingDistributor(DistributorBaseClass):
    """
    Distributor that runs feature-calculation jobs in parallel on the
    local machine via a multiprocessing Pool.
    """

    def __init__(self, n_workers, disable_progressbar=False,
                 progressbar_title="Feature Extraction", show_warnings=True):
        """
        Creates a new MultiprocessingDistributor instance

        :param n_workers: How many workers should the multiprocessing pool have?
        :type n_workers: int
        :param disable_progressbar: whether to show a progressbar or not.
        :type disable_progressbar: bool
        :param progressbar_title: the title of the progressbar
        :type progressbar_title: basestring
        :param show_warnings: whether to show warnings or not.
        :type show_warnings: bool
        """
        self.n_workers = n_workers
        self.disable_progressbar = disable_progressbar
        self.progressbar_title = progressbar_title
        # Each worker is initialized with the desired warning visibility.
        self.pool = Pool(
            processes=n_workers,
            initializer=initialize_warnings_in_workers,
            initargs=(show_warnings,),
        )

    def distribute(self, func, partitioned_chunks, kwargs):
        """
        Calculates the features in a parallel fashion by distributing the
        map command to the worker pool.

        :param func: the function to send to each worker.
        :type func: callable
        :param partitioned_chunks: The list of data chunks - each element
            is again a list of chunks - and should be processed by one worker.
        :type partitioned_chunks: iterable
        :param kwargs: parameters for the map function
        :type kwargs: dict of string to parameter

        :return: The result of the calculation as a list - each item should
            be the result of the application of func to a single element.
        """
        worker_func = partial(func, **kwargs)
        return self.pool.imap_unordered(worker_func, partitioned_chunks)

    def close(self):
        """
        Collects the result from the workers and closes the worker pool.
        """
        # NOTE(review): close -> terminate -> join mirrors the original
        # shutdown order; results are consumed lazily from imap_unordered
        # before close() is called, so terminate here only reaps workers.
        self.pool.close()
        self.pool.terminate()
        self.pool.join()
def extractor_pool(self, func, iterable):
    '''
    Extract items (billard multiprocessing use)

    Applies ``func`` to every element of ``iterable`` in a pool of
    ``cpu_count()`` worker processes and waits for completion.

    :param func: function applied to each item; must be picklable
    :param iterable: list of items to process
    :raises Exception: re-raises the first exception raised in a worker
    '''
    p = Pool(processes=cpu_count())
    # Finalize self-registers globally, ensuring the pool is terminated
    # at interpreter exit even if this frame is long gone.
    Finalize(p, p.terminate)
    try:
        # Keep the AsyncResult: without .get() any exception raised in a
        # worker would be silently discarded.
        result = p.map_async(func, iterable)
        p.close()
        p.join()
        result.get()
    finally:
        p.terminate()