def test_add_task():
    """A task added to a running scheduler should appear in its queue."""
    sched = Scheduler()
    sched.start()

    # Queue starts empty; adding one task grows it by exactly one.
    assert sched.tasks.qsize() == 0
    sched.add(generate_task())
    assert sched.tasks.qsize() == 1

    sched.stop()
def test_custom_validator(mock_print):
    """A validator which always returns RETRY should re-run the task once per interval."""

    def always_retry(job):
        job()
        return SchedulerPolicies.RETRY

    sched = Scheduler(validator=always_retry, interval=1)
    sched.add(Task(mock_print, Priorities.MEDIUM))

    sched.start()
    sleep(3)
    sched.stop()

    # ~3 seconds at a 1-second interval; allow one call of timing jitter.
    assert 2 <= mock_print.call_count <= 3
def test_default_validator(mock_print, mock_raise):
    """
    Test the default task validator.

    Default validator (_default_validator) behaviour:
        - task raises: retry task
        - task finishes: continue to next task
    """
    sched = Scheduler(interval=1)

    # The raising task has higher priority, so it is retried forever and
    # the print task never gets a turn.
    sched.add(Task(mock_print, Priorities.MEDIUM))
    sched.add(Task(mock_raise, Priorities.HIGH))

    sched.start()
    sleep(3)
    sched.stop()

    assert 2 <= mock_raise.call_count <= 3
    assert mock_print.call_count == 0
class MPHandler:
    """
    A class providing functions which perform mathematical computations using a Scheduler.

    Important:
        - Keep a reference to any instances of `MPHandler` to prevent them from being
          garbage collected before tasks have completed.
        - Calling any function on a running MPHandler will stop any tasks currently
          in progress.
    """

    def __init__(self):
        # No scheduler until the first coroutine creates one; each coroutine
        # below calls `self.stop()` and then replaces this attribute.
        self.scheduler: Scheduler = None

    async def coro_transform(
            self, params: TFParams,
            on_progress: Callable[[int, int], None]) -> List[tuple]:
        """
        Performs a wavelet transform or windowed Fourier transform of signals.
        Used in "time-frequency analysis".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        signals: Signals = params.signals
        params.remove_signals(
        )  # Don't want to pass large unneeded object to other process.

        # One process per time series.
        for time_series in signals:
            self.scheduler.add(
                target=_time_frequency,
                args=(time_series, params),
                process_type=mp.Process,
                queue_type=mp.Queue,
            )

        return await self.scheduler.run()

    async def coro_phase_coherence(
        self,
        signals: SignalPairs,
        params: PCParams,
        on_progress: Callable[[int, int], None],
    ) -> List[tuple]:
        """
        Performs wavelet phase coherence between signal pairs.
        Used in "wavelet phase coherence".

        :param signals: the pairs of signals
        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        # One process per signal pair; surrogates are reported as subtasks.
        for i in range(signals.pair_count()):
            pair = signals.get_pair_by_index(i)
            self.scheduler.add(
                target=_phase_coherence,
                args=(pair, params),
                subtasks=params.surr_count,
                process_type=mp.Process,
                queue_type=mp.Queue,
            )

        return await self.scheduler.run()

    async def coro_ridge_extraction(
            self, params: REParams,
            on_progress: Callable[[int, int], None]) -> List[tuple]:
        """
        Performs ridge extraction on wavelet transforms.
        Used in "ridge extraction and filtering".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        signals = params.signals
        num_transforms = len(signals)
        intervals = params.intervals

        # One task per (transform, frequency interval) combination.
        for i in range(num_transforms):
            for j in range(len(intervals)):
                fmin, fmax = intervals[j]

                # NOTE(review): `params` is mutated in-place before each add and the
                # same object is passed to every task — this assumes the Scheduler
                # captures/serializes args at add-time; verify against Scheduler.add.
                params.set_item(_fmin, fmin)
                params.set_item(_fmax, fmax)

                self.scheduler.add(
                    target=_ridge_extraction,
                    args=(signals[i], params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bandpass_filter(
        self,
        signals: Signals,
        intervals: tuple,
        on_progress: Callable[[int, int], None],
    ) -> List[tuple]:
        """
        Performs bandpass filter on signals.
        Used in "ridge extraction and filtering".

        :param signals: the signals
        :param intervals: the intervals to calculate bandpass filter on
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        # One task per (signal, interval) combination.
        for s in signals:
            fs = s.frequency
            for i in range(len(intervals)):
                fmin, fmax = intervals[i]
                self.scheduler.add(
                    target=_bandpass_filter,
                    args=(s, fmin, fmax, fs),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bayesian(
        self,
        signals: SignalPairs,
        paramsets: List[ParamSet],
        on_progress: Callable[[int, int], None],
    ) -> List[tuple]:
        """
        Performs Bayesian inference on signal pairs.
        Used in "dynamical Bayesian inference".

        :param signals: the signals
        :param paramsets: the parameter sets to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        # One task per (parameter set, signal pair) combination.
        for params in paramsets:
            for pair in signals.get_pairs():
                self.scheduler.add(
                    target=_dynamic_bayesian_inference,
                    args=(*pair, params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bispectrum_analysis(
        self,
        signals: SignalPairs,
        params: BAParams,
        on_progress: Callable[[int, int], None],
    ) -> List[tuple]:
        """
        Performs wavelet bispectrum analysis on signal pairs.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param params: the parameters to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        # Each pair's computation reports 4 subtasks to the progress callback.
        for pair in signals.get_pairs():
            self.scheduler.add(
                target=_bispectrum_analysis,
                args=(*pair, params),
                subtasks=4,
                process_type=mp.Process,
                queue_type=mp.Queue,
            )

        return await self.scheduler.run()

    async def coro_biphase(
        self,
        signals: SignalPairs,
        fs: float,
        f0: float,
        fr: Tuple[float, float],
        on_progress: Callable[[int, int], None],
    ) -> List[tuple]:
        """
        Calculates biphase and biamplitude.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param fs: the sampling frequency
        :param f0: the resolution
        :param fr: 'x' and 'y' frequencies
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(progress_callback=on_progress)

        for pair in signals.get_pairs():
            # `opt` is taken from the first signal of each pair.
            opt = pair[0].output_data.opt
            self.scheduler.add(
                target=_biphase,
                args=(*pair, fs, f0, fr, opt),
                process_type=mp.Process,
                queue_type=mp.Queue,
            )

        return await self.scheduler.run()

    async def coro_preprocess(self, signal: TimeSeries, fmin: float,
                              fmax: float) -> List[Tuple]:
        """
        Performs preprocessing on a single signal.

        :param signal: the signal as a 1D array
        :param fmin: the minimum frequency
        :param fmax: the maximum frequency
        :return: list containing the output from each process
        """
        self.stop()
        # No progress callback here; preprocessing is a single short task.
        self.scheduler = Scheduler()

        self.scheduler.add(
            target=_preprocess,
            args=(signal.signal, signal.frequency, fmin, fmax),
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

        return await self.scheduler.run()

    def stop(self):
        """
        Stops the tasks in progress. The MPHandler instance can be reused.
        """
        if self.scheduler:
            self.scheduler.terminate()
def harmonicfinder_impl_python(
    signal: ndarray,
    fs: float,
    scale_min: float,
    scale_max: float,
    sigma: float,
    time_res: float,
    surr_count: int,
    parallel: bool,
    crop: bool,
) -> Tuple[ndarray, ndarray, ndarray, ndarray]:
    """
    Python implementation of the harmonicfinder function.

    :param signal: the signal as a 1D array (a 2D row-vector is flattened)
    :param fs: the sampling frequency
    :param scale_min: minimum scale for the wavelet transform
    :param scale_max: maximum scale for the wavelet transform
    :param sigma: wavelet parameter
    :param time_res: time resolution; fs * time_res should be an integer
    :param surr_count: number of surrogates to compute
    :param parallel: whether surrogate calculations run in parallel processes
    :param crop: whether to crop NaN rows/columns from the results
    :return: tuple of (scalefreq, res, pos1, pos2), the last three transposed
    """
    if int(fs * time_res) != fs * time_res:
        warnings.warn(
            f"fs * time_res must be an integer, but it is {fs * time_res}. "
            f"You may wish to try a different time resolution.",
            RuntimeWarning,
        )

    # If the signal is a 2D row-vector, reduce it to 1D. A true 1D array
    # raises ValueError when unpacking .shape, which we deliberately ignore.
    try:
        x, y = signal.shape
        if y > x:
            signal = signal[0, :]
    except ValueError:
        pass

    output1 = modbasicwavelet_flow_cmplx4(signal, fs, scale_min, scale_max,
                                          sigma, time_res)

    scalefreq = scale_frequency(scale_min, scale_max, sigma)

    m, n = output1.shape
    detsig = signal

    ressur = np.empty((
        surr_count,
        m,
        m,
    ))
    # Fix: `np.NaN` was removed in NumPy 2.0; `np.nan` is the canonical spelling.
    ressur.fill(np.nan)

    scheduler = Scheduler(shared_memory=False)
    surr_args = [(i, output1, m, n, detsig, fs, scale_min, scale_max, sigma,
                  time_res) for i in range(surr_count)]

    # Add harmonic calculation for main signal to scheduler. This should be the first item added.
    scheduler.add(target=_calculate_harmonics,
                  args=(output1, m, n, surr_count, True))

    # Add surrogate calculations to scheduler.
    for args in surr_args:
        scheduler.add(target=_calculate_surrogate, args=args)

    if surr_count > 0 and parallel:
        # Run scheduler.
        result: List[ndarray] = scheduler.run_blocking()

        # Get main harmonics result.
        res = result[0]

        # Get surrogate results (result[1:] correspond to surr_args in order).
        for sigb in range(1, len(result)):
            ressur[sigb - 1, :, :] = result[sigb][:, :]
    else:
        # NOTE(review): in this branch the queued surrogate tasks are never run,
        # so `ressur` stays all-NaN even when surr_count > 0 — the significance
        # statistics below will then be NaN. Confirm this is intended when
        # `parallel` is False.
        res = _calculate_harmonics(output1, m, n, surr_count, parallel)

    sig = np.empty((1 + ressur.shape[0], ))
    pos = np.empty((m, m))

    # Rank the main result against the surrogates for the lower triangle
    # (and mirror into the upper triangle).
    for a1 in range(m):
        for a2 in range(1, a1 + 2):
            _res_a1_a2 = res[a1, a2 - 1]

            # Ensure a 1-element array so it can be concatenated.
            if not hasattr(_res_a1_a2, "__len__"):
                _res_a1_a2 = np.asarray((_res_a1_a2, ))

            isurr = np.argsort(
                np.concatenate((
                    _res_a1_a2,
                    ressur[:, a1, a2 - 1],
                )))
            sig[isurr] = np.arange(0, surr_count + 1)

            pos[a1, a2 - 1] = sig[0]
            pos[a2 - 1, a1] = sig[0]

    pos1 = pos.copy()

    surrmean = np.empty((
        m,
        m,
    ))
    surrstd = np.empty((
        m,
        m,
    ))

    # Z-score of the main result against the surrogate distribution, capped at 5.
    # (Here `sig` is deliberately reused as a scalar.)
    for a1 in range(m):
        for a2 in range(m):
            surrmean[a1, a2] = np.nanmean(ressur[:, a1, a2])
            surrstd[a1, a2] = np.nanstd(ressur[:, a1, a2])

            sig = (res[a1, a2] - surrmean[a1, a2]) / surrstd[a1, a2]
            pos[a1, a2] = np.min([sig, 5])

    pos2 = pos

    if crop and not np.all(np.isnan(res), axis=(
            0,
            1,
    )):
        # Crop out rows which are completely NaN.
        mask1 = ~np.all(np.isnan(res),
                        axis=0)  # Columns which only contain NaN values.

        res = res[mask1][:, mask1]
        scalefreq = scalefreq[mask1]
        pos1 = pos1[mask1][:, mask1]
        pos2 = pos2[mask1][:, mask1]

        # Crop out rows and columns which contain any NaN values at the top right of the signal.
        mask2 = np.any(np.isnan(res),
                       axis=0)  # Columns which contain a NaN value.
        try:
            index = mask2.nonzero()[0][-1] + 1

            res = res[index:, index:]
            scalefreq = scalefreq[index:]
            pos1 = pos1[index:, index:]
            pos2 = pos2[index:, index:]

            warnings.warn(
                f"Cropping invalid values from the results. "
                f"The frequency range may be much smaller than expected.",
                RuntimeWarning,
            )
        except IndexError:
            # No NaN column found; nothing further to crop.
            pass

    return (
        scalefreq,
        res.conj().T,
        pos1.conj().T,
        pos2.conj().T,
    )
class MPHandler:
    """
    A class providing functions which perform mathematical computations using a Scheduler.

    Important:
        - Keep a reference to any instances of `MPHandler` to prevent them from being
          garbage collected before tasks have completed.
        - Calling any function on a running MPHandler will stop any tasks currently
          in progress.
    """

    # On Linux, we don't need to run in a thread because processes can be forked; we also need to avoid
    # using a thread because this will cause issues with the LD_LIBRARY_PATH.
    should_run_in_thread = not OS.is_linux()

    # On macOS, multiprocess has issues so we need to use threads for everything.
    only_threads = OS.is_mac_os()

    def __init__(self):
        # No scheduler until the first coroutine creates one; each coroutine
        # below calls `self.stop()` and then replaces this attribute.
        self.scheduler: Scheduler = None

    async def coro_transform(
            self, params: TFParams,
            on_progress: Callable[[int, int], None]) -> List[Tuple]:
        """
        Performs a wavelet transform or windowed Fourier transform of signals.
        Used in "time-frequency analysis".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        signals: Signals = params.signals
        params.remove_signals(
        )  # Don't want to pass large unneeded object to other process.

        return await self.scheduler.map(
            target=_time_frequency,
            args=[(time_series, params, True) for time_series in signals],
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_harmonics(
        self,
        signals: Signals,
        params: DHParams,
        preprocess: bool,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Detects harmonics in signals.

        :param signals: the signals
        :param params: the parameters to pass to the harmonic finder
        :param preprocess: whether to perform pre-processing on the signals
        :param on_progress: the progress callback
        :return: list containing the output from each process
        """
        # Whether to parallelize the algorithm for each calculation.
        parallel = len(signals) < Scheduler.optimal_process_count()

        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        args = [(
            preprocess,
            sig.signal,
            params,
            *params.args(),
            parallel,
            params.crop,
        ) for sig in signals]

        return await self.scheduler.map(target=harmonic_wrapper, args=args)

    async def coro_phase_coherence(
        self,
        signals: SignalPairs,
        params: PCParams,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs wavelet phase coherence between signal pairs.
        Used in "wavelet phase coherence".

        :param signals: the pairs of signals
        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # One task per signal pair; surrogates are reported as subtasks.
        return await self.scheduler.map(
            target=_phase_coherence,
            args=[(pair, params) for pair in signals.get_pairs()],
            subtasks=params.surr_count,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_ridge_extraction(
            self, params: REParams,
            on_progress: Callable[[int, int], None]) -> List[Tuple]:
        """
        Performs ridge extraction on wavelet transforms.
        Used in "ridge extraction and filtering".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        signals = params.signals
        num_transforms = len(signals)
        intervals = params.intervals

        # One task per (transform, frequency interval) combination.
        for i in range(num_transforms):
            for j in range(len(intervals)):
                fmin, fmax = intervals[j]

                # NOTE(review): `params` is mutated in-place before each add and the
                # same object is passed to every task — this assumes the Scheduler
                # captures/serializes args at add-time; verify against Scheduler.add.
                params.set_item(_fmin, fmin)
                params.set_item(_fmax, fmax)

                self.scheduler.add(
                    target=_ridge_extraction,
                    args=(signals[i], params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bandpass_filter(
        self,
        signals: Signals,
        intervals: Tuple,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs bandpass filter on signals.
        Used in "ridge extraction and filtering".

        :param signals: the signals
        :param intervals: the intervals to calculate bandpass filter on
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # One task per (signal, interval) combination.
        for s in signals:
            fs = s.frequency
            for i in range(len(intervals)):
                fmin, fmax = intervals[i]
                self.scheduler.add(
                    target=_bandpass_filter,
                    args=(s, fmin, fmax, fs),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bayesian(
        self,
        signals: SignalPairs,
        paramsets: List[ParamSet],
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs Bayesian inference on signal pairs.
        Used in "dynamical Bayesian inference".

        :param signals: the signals
        :param paramsets: the parameter sets to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # One task per (parameter set, signal pair) combination.
        for params in paramsets:
            for pair in signals.get_pairs():
                self.scheduler.add(
                    target=_dynamic_bayesian_inference,
                    args=(*pair, params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bispectrum_analysis(
        self,
        signals: SignalPairs,
        params: BAParams,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs wavelet bispectrum analysis on signal pairs.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param params: the parameters to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # Each pair's computation reports 4 subtasks to the progress callback.
        return await self.scheduler.map(
            target=_bispectrum_analysis,
            args=[(*pair, params) for pair in signals.get_pairs()],
            subtasks=4,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_biphase(
        self,
        signals: SignalPairs,
        fs: float,
        f0: float,
        fr: Tuple[float, float],
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Calculates biphase and biamplitude.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param fs: the sampling frequency
        :param f0: the resolution
        :param fr: 'x' and 'y' frequencies
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # `opt` is taken from the first signal of each pair.
        args = [(s1, s2, fs, f0, fr, s1.output_data.opt)
                for s1, s2 in signals.get_pairs()]

        return await self.scheduler.map(target=_biphase,
                                        args=args,
                                        process_type=mp.Process,
                                        queue_type=mp.Queue)

    async def coro_group_coherence(self, sig1a: ndarray, sig1b: ndarray,
                                   fs: float, percentile: Optional[float],
                                   on_progress: Callable[[int, int], None],
                                   *args, **kwargs) -> List[Tuple]:
        """
        Calculates group coherence.

        Parameters
        ----------
        sig1a : ndarray
            The set of signals A for group 1.
        sig1b : ndarray
            The set of signals B for group 1.
        fs : float
            The sampling frequency of the signals.
        percentile : Optional[float]
            The percentile at which the surrogates will be subtracted.
        on_progress : Callable
            Function called to report progress.
        args
            Arguments to pass to the wavelet transform.
        kwargs
            Keyword arguments to pass to the wavelet transform.

        Returns
        -------
        freq : ndarray
            [1D array] The frequencies.
        coh1 : ndarray
            [2D array] The residual coherence for group 1.
        surr1 : ndarray
            [3D array] The surrogates for group 1.
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # All arguments are bound via functools.partial; the single empty
        # tuple in `args` makes the scheduler run exactly one task.
        return await self.scheduler.map(
            target=functools.partial(
                pymodalib.group_coherence,
                sig1a,
                sig1b,
                fs,
                percentile,
                True,
                *args,
                **kwargs,
            ),
            args=[
                tuple(),
            ],
        )

    async def coro_dual_group_coherence(self, sig1a: ndarray, sig1b: ndarray,
                                        sig2a: ndarray, sig2b: ndarray,
                                        fs: float, percentile: Optional[float],
                                        on_progress: Callable[[int, int],
                                                              None], *args,
                                        **kwargs) -> List[Tuple]:
        """
        Calculates group coherence.

        Parameters
        ----------
        sig1a : ndarray
            The set of signals A for group 1.
        sig1b : ndarray
            The set of signals B for group 1.
        sig2a : ndarray
            The set of signals A for group 2.
        sig2b : ndarray
            The set of signals B for group 2.
        fs : float
            The sampling frequency of the signals.
        percentile : Optional[float]
            The percentile at which the surrogates will be subtracted.
        on_progress : Callable
            Function called to report progress.
        args
            Arguments to pass to the wavelet transform.
        kwargs
            Keyword arguments to pass to the wavelet transform.

        Returns
        -------
        freq : ndarray
            [1D array] The frequencies.
        coh1 : ndarray
            [2D array] The residual coherence for group 1.
        coh2 : ndarray
            [2D array] The residual coherence for group 2.
        surr1 : ndarray
            [3D array] The surrogates for group 1.
        surr2 : ndarray
            [3D array] The surrogates for group 2.
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # Single task; all arguments bound via functools.partial.
        return await self.scheduler.map(
            target=functools.partial(
                pymodalib.dual_group_coherence,
                sig1a,
                sig1b,
                sig2a,
                sig2b,
                fs,
                percentile,
                *args,
                **kwargs,
            ),
            args=[
                tuple(),
            ],
        )

    async def coro_statistical_test(
        self,
        freq: ndarray,
        coh1: ndarray,
        coh2: ndarray,
        bands: List[Tuple[float, float]],
        on_progress: Callable[[int, int], None],
    ) -> Dict[Tuple[float, float], float]:
        """
        Performs a statistical test on the results of group phase coherence,
        to check for significance.

        Parameters
        ----------
        freq : ndarray
            [1D array] The frequencies from group coherence.
        coh1 : ndarray
            [2D array] The coherence of the first group.
        coh2 : ndarray
            [2D array] The coherence of the second group.
        bands : List[Tuple[float,float]]
            List containing the frequency bands which will be tested for significance.
        on_progress : Callable
            Function called to report progress.

        Returns
        -------
        pvalues : Dict[Tuple[float, float], float]
            A list containing the p-values for each frequency band.
        """
        self.stop()
        self.scheduler = Scheduler(
            run_in_thread=self.should_run_in_thread,
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # Imported lazily to avoid paying the pymodalib import cost at module load.
        from pymodalib.algorithms.group_coherence import statistical_test

        # Single task; [0] extracts its result from the one-element list.
        results = (await self.scheduler.map(
            target=statistical_test,
            args=[(
                freq,
                coh1,
                coh2,
                bands,
            )],
        ))[0]

        return dict(zip(bands, results))

    async def coro_preprocess(self, signals: Union[TimeSeries,
                                                   List[TimeSeries]],
                              fmin: float, fmax: float) -> List[ndarray]:
        """
        Performs preprocessing on a signal, or on each signal in a list.

        :param signals: the signal or signals to perform pre-processing on
        :param fmin: the minimum frequency
        :param fmax: the maximum frequency
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            run_in_thread=True,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        # Normalise the single-signal case to a one-element list.
        if isinstance(signals, TimeSeries):
            signals = [signals]

        args = [(s.signal, s.frequency, fmin, fmax) for s in signals]
        return await self.scheduler.map(
            target=pymodalib.preprocess,
            args=args,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    def stop(self):
        """
        Stops the tasks in progress. The MPHandler instance can be reused.
        """
        if self.scheduler:
            self.scheduler.terminate()
class ManagePluginsDialog(QDialog):
    """
    Dialog which lists available plugins and lets the user toggle their
    installation status. After the dialog is accepted, call `get_changes()`
    to retrieve the pending install/uninstall requests.
    """

    def __init__(self, parent, installed: List[str]):
        super(ManagePluginsDialog, self).__init__(parent)
        uic.loadUi(resources.get_manage_plugins_layout(), self)
        self.setWindowTitle("Manage plugins")

        self.btn_apply.clicked.connect(self.on_apply_clicked)

        # NOTE(review): annotated as List[str], but add_dev_items() calls
        # `.items()` on it — callers appear to pass a dict; confirm and fix
        # the annotation at the call site.
        self.installed = installed

        # Maps plugin URL -> desired installation status (True = install).
        self.changes = {}

        asyncio.ensure_future(self.coro_get_plugins())

    async def coro_get_plugins(self):
        """Fetches the online plugin lists and populates both tables."""
        self.scheduler = Scheduler()
        self.scheduler.add(target=online.find_online_plugins)

        self.tbl1: QTableView = self.tbl_trusted
        self.tbl2: QTableView = self.tbl_community

        # Bug fix: `btn_apply.clicked` was connected to `on_apply_clicked` a
        # second time here (it is already connected in __init__), which made
        # the slot fire twice per click. The duplicate connection is removed.

        self.model1 = QStandardItemModel()
        self.model2 = QStandardItemModel()

        for m in (self.model1, self.model2):
            m.itemChanged.connect(self.on_item_changed)
            m.setHorizontalHeaderLabels(["Install status", "Game", "Author"])

        for t, m in zip([self.tbl1, self.tbl2], [self.model1, self.model2]):
            t.verticalHeader().setVisible(False)
            t.setModel(m)

        trusted, community = (await self.scheduler.run())[0]

        self.add_items(self.model1, trusted)
        self.add_items(self.model2, community)
        self.add_dev_items(self.model2, self.installed)

        self.tbl1.resizeColumnsToContents()
        self.tbl2.resizeColumnsToContents()

    def add_items(self, model, items) -> None:
        """
        Adds a row per plugin, with a checkbox reflecting whether the plugin
        is currently installed.
        """
        for p in items:
            # Fix: `QStandardItem(True)` accidentally hit the `int rows`
            # constructor overload; the default constructor is intended.
            checkbox = QStandardItem()
            checkbox.setCheckable(True)
            checkbox.setText("Installed")

            # Stash the URL on the item so get_plugin() can recover it later.
            checkbox.url = p.url

            if p.url in self.installed:
                checkbox.setCheckState(Qt.Checked)
            else:
                checkbox.setCheckState(Qt.Unchecked)

            print(p.description)
            model.appendRow(
                [checkbox, QStandardItem(p.game), QStandardItem(p.author)])

    def add_dev_items(self, model, items: Dict[str, str]) -> None:
        """
        Adds rows for locally-installed development plugins (keys starting
        with "DEV"); these are always shown as installed.
        """
        for key, value in items.items():
            if not key.startswith("DEV"):
                continue

            checkbox = QStandardItem()
            checkbox.setCheckable(True)
            checkbox.setText("Installed")
            checkbox.url = key
            checkbox.setCheckState(Qt.Checked)

            model.appendRow(
                [checkbox, QStandardItem(key), QStandardItem("dev")])

    def on_apply_clicked(self) -> None:
        self.accept()

    def get_changes(self) -> Dict:
        """Returns the pending changes: plugin URL -> desired install status."""
        return self.changes

    def on_item_changed(self, item) -> None:
        """
        Records a pending change when a checkbox is toggled; toggling the same
        item back to its previous state cancels the pending change.
        """
        url = self.get_plugin(self.model1, item)
        if not url:
            url = self.get_plugin(self.model2, item)

        install_status = item.checkState() == Qt.Checked
        print(f"Changed installation status for {url} to {install_status}.")

        # Bug fix: the original used `if self.changes.get(url):`, so a recorded
        # `False` (pending uninstall) was falsy and got overwritten with `True`
        # instead of being cancelled. Membership must be tested explicitly.
        if url in self.changes:
            del self.changes[url]
        else:
            self.changes[url] = install_status

    def get_plugin(self, model, checkbox) -> Optional[str]:
        """Returns the URL stored on the matching checkbox item, or None."""
        for i in range(model.rowCount()):
            item = model.item(i, 0)
            if checkbox == item:
                return item.url
        return None