def on_open_logs_clicked() -> None:
    location = file_utils.pymoda_path

    if OS.is_windows():
        os.startfile(location)
    elif OS.is_linux():
        subprocess.Popen(["xdg-open", location])
    elif OS.is_mac_os():
        subprocess.Popen(["open", location])
def get_pymoda_download_url() -> str:
    asset = ""

    if OS.is_windows():
        asset = "PyMODA-win64.zip"
    elif OS.is_linux():
        asset = "PyMODA-linux_x86_64.tar.gz"
    elif OS.is_mac_os():
        asset = "PyMODA-macOS.tar.gz"

    return f"https://github.com/luphysics/pymoda/releases/latest/download/{asset}"
def get_launcher_download_url() -> str:
    asset = ""

    if OS.is_windows():
        return ""
    elif OS.is_linux():
        asset = "launcher-linux"
    elif OS.is_mac_os():
        asset = "launcher-macos"

    return (
        f"https://github.com/luphysics/pymoda-launcher/releases/latest/download/{asset}"
    )
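
# Hedged usage sketch (not from the PyMODA source): one way the URLs returned
# above might be consumed. `urllib.request` and the helper name are used purely
# for illustration; the real updater may fetch the asset differently.
import urllib.request

def _download_latest_release_sketch() -> str:
    url = get_pymoda_download_url()
    filename = url.rsplit("/", 1)[-1]  # e.g. "PyMODA-win64.zip" on Windows.
    urllib.request.urlretrieve(url, filename)  # Save the release asset to the working directory.
    return filename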
def setup_matlab_runtime() -> None:
    """
    Sets the LD_LIBRARY_PATH variable (DYLD_LIBRARY_PATH on macOS) to the value
    provided in the arguments.

    Required on Linux, but can be safely called on other operating systems.

    Should not be executed in the main process, because this will crash PyQt on Linux.
    """
    path = matlab_runtime()

    if path:
        if OS.is_linux():
            os.environ["LD_LIBRARY_PATH"] = path
        elif OS.is_mac_os():
            os.environ["DYLD_LIBRARY_PATH"] = path
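
# Hedged sketch (assumed, not from the PyMODA source): since setup_matlab_runtime()
# must not run in the main process on Linux, it would typically be called as the first
# statement of a multiprocessing worker. `_run_matlab_computation` and
# `_spawn_matlab_worker_sketch` are hypothetical names used only for illustration.
import multiprocessing

def _run_matlab_computation() -> None:
    setup_matlab_runtime()  # Safe here: only the child process's environment is modified.
    # ... perform MATLAB-Runtime-backed work ...

def _spawn_matlab_worker_sketch() -> multiprocessing.Process:
    process = multiprocessing.Process(target=_run_matlab_computation)
    process.start()
    return process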
def create_shortcut() -> str:
    """
    Creates a shortcut to launch PyMODA with current arguments.

    Can be called on any operating system.
    """
    if OS.is_windows():
        status = _create_shortcut_windows()
    elif OS.is_linux():
        status = _create_alias_nix()
    elif OS.is_mac_os():
        status = _create_alias_nix()
    else:
        status = "Operating system unknown. Could not create shortcut."

    return status
def get_instance(tag, progress_signal) -> Updater:
    """
    Creates the Updater for the current OS.

    Parameters
    ----------
    tag : str
        The release tag of the latest release.
    progress_signal : pyqtSignal
        Signal which emits the download progress.

    Returns
    -------
    Updater
        The Updater for the current OS.
    """
    if OS.is_windows():
        return WindowsUpdater(tag, progress_signal)
    elif OS.is_linux():
        return LinuxUpdater(tag, progress_signal)
    else:
        return MacUpdater(tag, progress_signal)
def _create_alias_nix() -> str:
    """
    Creates a command-line alias on *nix to launch PyMODA with current arguments,
    by adding the alias to ~/.bashrc, and to ~/.zshrc if it exists or zsh is installed.
    """
    if OS.is_linux():
        bashrc = _get_abs_path_in_home_folder(".bashrc")  # Bash on Linux.
    else:
        bashrc = _get_abs_path_in_home_folder(".bash_profile")  # Bash on macOS.

    zshrc = _get_abs_path_in_home_folder(".zshrc")  # Zsh.

    try:
        with open(bashrc, "r") as f:
            bash_lines = f.readlines()
    except FileNotFoundError:
        bash_lines = []

    try:
        with open(zshrc, "r") as f:
            zsh_lines = f.readlines()
    except FileNotFoundError:
        if _is_zsh_installed():
            zsh_lines = []
        else:
            zsh_lines = None

    alias_pymoda = "alias pymoda="
    line_to_add = (
        f"{alias_pymoda}'{_get_executable_nix()} {_python_interpreter_arguments()}'\n\n"
    )

    def filter_out_alias(lines: List[str]) -> List[str]:
        return list(filter(lambda line: alias_pymoda not in line, lines))

    bash_lines = filter_out_alias(bash_lines)
    bash_lines.append(line_to_add)

    with open(bashrc, "w") as f:
        f.writelines(bash_lines)

    if zsh_lines is not None:
        zsh_lines = filter_out_alias(zsh_lines)
        zsh_lines.append(line_to_add)

        with open(zshrc, "w") as f:
            f.writelines(zsh_lines)

    return (
        "Created 'pymoda' alias to launch PyMODA with current arguments. "
        "Open a new terminal in any folder and try typing 'pymoda'."
    )
def __patched_run(self):
    try:
        self.__run()
    except Exception as e:
        if OS.is_windows():
            sys.excepthook(*sys.exc_info())
        else:
            # For some reason, `sys.excepthook` doesn't work in processes on *nix
            # (even after monkey-patching), so we'll write the logs manually.
            tb = "".join(traceback.format_tb(e.__traceback__))
            msg = f"\n{type(e)}\n{tb}{e}"

            log_utils.process_write_log(msg)

        raise e
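
# Hedged sketch of how a wrapper like __patched_run is typically installed; the
# attribute name "__run" and the patch site are assumptions, not the PyMODA source.
# The wrapper calls `self.__run()`, so the original Process.run must be saved under
# that name before Process.run is replaced.
def _install_run_patch_sketch() -> None:
    from multiprocessing import Process

    Process.__run = Process.run  # Keep the original run() where the wrapper expects it.
    Process.run = __patched_run  # Route every process body through the logging wrapper.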
def check_matlab_runtime(self) -> None:
    """
    Checks whether the LD_LIBRARY_PATH for the MATLAB Runtime is correctly passed
    to the program, and shows a dialog if appropriate.
    """
    if (
        OS.is_linux()
        and not args.matlab_runtime()
        and self.settings.is_runtime_warning_enabled()
    ):
        dialog = MatlabRuntimeDialog()
        dialog.exec()

        self.settings.set_runtime_warning_enabled(not dialog.dont_show_again)
        if not dialog.dont_show_again:
            sys.exit(0)
def _get_start_method() -> Optional[str]:
    """
    Gets the start method, which depends on the current OS.

    For Windows and Linux, the defaults ('spawn' and 'fork' respectively) are fine.

    For macOS, the default was 'fork' until Python 3.8, but this causes errors:

    "The process has forked and you cannot use this CoreFoundation functionality safely.
    You MUST exec(). Break on
    __THE_PROCESS_HAS_FORKED_AND_YOU_CANNOT_USE_THIS_COREFOUNDATION_FUNCTIONALITY___YOU_MUST_EXEC__()
    to debug."

    Therefore, the start method is set to 'spawn' on macOS.
    """
    if OS.is_mac_os():
        return "spawn"

    return None
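
# Minimal sketch (assumed, not from the source) of how the value returned by
# _get_start_method() might be applied; it must run before any processes are created.
# `_apply_start_method_sketch` is an illustrative name.
import multiprocessing

def _apply_start_method_sketch() -> None:
    method = _get_start_method()
    if method:
        multiprocessing.set_start_method(method)  # e.g. "spawn" on macOS.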
def check_matlab_runtime(self) -> None:
    """
    Checks whether the LD_LIBRARY_PATH for the MATLAB Runtime is correctly passed
    to the program, and shows a dialog if appropriate.
    """
    from pymodalib.utils.matlab_runtime import get_runtime_status

    status = get_runtime_status()

    if (
        not OS.is_windows()
        and status is RuntimeStatus.NOT_EXISTS
        and not args.matlab_runtime()
        and self.settings.is_runtime_warning_enabled()
    ):
        dialog = MatlabRuntimeDialog()
        dialog.exec()

        self.settings.set_runtime_warning_enabled(not dialog.dont_show_again)
        if not dialog.dont_show_again:
            sys.exit(0)
def _get_launcher_name() -> str:
    return "launcher.exe" if OS.is_windows() else "launcher"
#
#  You should have received a copy of the GNU General Public License
#  along with this program. If not, see <https://www.gnu.org/licenses/>.

import os
import warnings
from os import path
from os.path import join

from utils.os_utils import OS

pymoda_path = None

username = os.environ.get("USERNAME") or os.environ.get("USER")
home = os.path.expanduser("~")

if OS.is_windows():
    pymoda_path = f"C:\\Users\\{username}\\AppData\\Roaming\\PyMODA"
else:
    pymoda_path = f"{home}/.pymoda"

os.makedirs(pymoda_path, exist_ok=True)

log_path = join(pymoda_path, "pymoda.log")
settings_path = join(pymoda_path, "settings.conf")

_whitelist = ["src", "res"]


def get_root_folder() -> str:
    """
    Returns the absolute path to PyMODA's root folder.
class MPHandler:
    """
    A class providing functions which perform mathematical computations using a Scheduler.

    Important:
        - Keep a reference to any instances of `MPHandler` to prevent them from being
          garbage collected before tasks have completed.
        - Calling any function on a running MPHandler will stop any tasks currently in progress.
    """

    # On Linux, we don't need to run in a thread because processes can be forked; we also need
    # to avoid using a thread because this will cause issues with the LD_LIBRARY_PATH.
    should_run_in_thread = not OS.is_linux()

    # On macOS, multiprocess has issues so we need to use threads for everything.
    only_threads = OS.is_mac_os()

    def __init__(self):
        self.scheduler: Scheduler = None

    async def coro_transform(
        self, params: TFParams, on_progress: Callable[[int, int], None]
    ) -> List[Tuple]:
        """
        Performs a wavelet transform or windowed Fourier transform of signals.
        Used in "time-frequency analysis".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        signals: Signals = params.signals
        params.remove_signals()  # Don't want to pass large unneeded object to other process.

        return await self.scheduler.map(
            target=_time_frequency,
            args=[(time_series, params, True) for time_series in signals],
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_harmonics(
        self,
        signals: Signals,
        params: DHParams,
        preprocess: bool,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Detects harmonics in signals.

        :param signals: the signals
        :param params: the parameters to pass to the harmonic finder
        :param preprocess: whether to perform pre-processing on the signals
        :param on_progress: the progress callback
        :return: list containing the output from each process
        """
        # Whether to parallelize the algorithm for each calculation.
        parallel = len(signals) < Scheduler.optimal_process_count()

        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        args = [
            (
                preprocess,
                sig.signal,
                params,
                *params.args(),
                parallel,
                params.crop,
            )
            for sig in signals
        ]

        return await self.scheduler.map(target=harmonic_wrapper, args=args)

    async def coro_phase_coherence(
        self,
        signals: SignalPairs,
        params: PCParams,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs wavelet phase coherence between signal pairs.
        Used in "wavelet phase coherence".

        :param signals: the pairs of signals
        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        return await self.scheduler.map(
            target=_phase_coherence,
            args=[(pair, params) for pair in signals.get_pairs()],
            subtasks=params.surr_count,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_ridge_extraction(
        self, params: REParams, on_progress: Callable[[int, int], None]
    ) -> List[Tuple]:
        """
        Performs ridge extraction on wavelet transforms.
        Used in "ridge extraction and filtering".

        :param params: the parameters which are used in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        signals = params.signals
        num_transforms = len(signals)
        intervals = params.intervals

        for i in range(num_transforms):
            for j in range(len(intervals)):
                fmin, fmax = intervals[j]

                params.set_item(_fmin, fmin)
                params.set_item(_fmax, fmax)

                self.scheduler.add(
                    target=_ridge_extraction,
                    args=(signals[i], params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bandpass_filter(
        self,
        signals: Signals,
        intervals: Tuple,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs bandpass filter on signals.
        Used in "ridge extraction and filtering".

        :param signals: the signals
        :param intervals: the intervals to calculate bandpass filter on
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        for s in signals:
            fs = s.frequency
            for i in range(len(intervals)):
                fmin, fmax = intervals[i]

                self.scheduler.add(
                    target=_bandpass_filter,
                    args=(s, fmin, fmax, fs),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bayesian(
        self,
        signals: SignalPairs,
        paramsets: List[ParamSet],
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs Bayesian inference on signal pairs.
        Used in "dynamical Bayesian inference".

        :param signals: the signals
        :param paramsets: the parameter sets to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        for params in paramsets:
            for pair in signals.get_pairs():
                self.scheduler.add(
                    target=_dynamic_bayesian_inference,
                    args=(*pair, params),
                    process_type=mp.Process,
                    queue_type=mp.Queue,
                )

        return await self.scheduler.run()

    async def coro_bispectrum_analysis(
        self,
        signals: SignalPairs,
        params: BAParams,
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Performs wavelet bispectrum analysis on signal pairs.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param params: the parameters to use in the algorithm
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        return await self.scheduler.map(
            target=_bispectrum_analysis,
            args=[(*pair, params) for pair in signals.get_pairs()],
            subtasks=4,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_biphase(
        self,
        signals: SignalPairs,
        fs: float,
        f0: float,
        fr: Tuple[float, float],
        on_progress: Callable[[int, int], None],
    ) -> List[Tuple]:
        """
        Calculates biphase and biamplitude.
        Used in "wavelet bispectrum analysis".

        :param signals: the signal pairs
        :param fs: the sampling frequency
        :param f0: the resolution
        :param fr: 'x' and 'y' frequencies
        :param on_progress: progress callback
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        args = [
            (s1, s2, fs, f0, fr, s1.output_data.opt)
            for s1, s2 in signals.get_pairs()
        ]

        return await self.scheduler.map(
            target=_biphase,
            args=args,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    async def coro_group_coherence(
        self,
        sig1a: ndarray,
        sig1b: ndarray,
        fs: float,
        percentile: Optional[float],
        on_progress: Callable[[int, int], None],
        *args,
        **kwargs,
    ) -> List[Tuple]:
        """
        Calculates group coherence.

        Parameters
        ----------
        sig1a : ndarray
            The set of signals A for group 1.
        sig1b : ndarray
            The set of signals B for group 1.
        fs : float
            The sampling frequency of the signals.
        percentile : Optional[float]
            The percentile at which the surrogates will be subtracted.
        on_progress : Callable
            Function called to report progress.
        args
            Arguments to pass to the wavelet transform.
        kwargs
            Keyword arguments to pass to the wavelet transform.

        Returns
        -------
        freq : ndarray
            [1D array] The frequencies.
        coh1 : ndarray
            [2D array] The residual coherence for group 1.
        surr1 : ndarray
            [3D array] The surrogates for group 1.
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        return await self.scheduler.map(
            target=functools.partial(
                pymodalib.group_coherence,
                sig1a,
                sig1b,
                fs,
                percentile,
                True,
                *args,
                **kwargs,
            ),
            args=[tuple()],
        )

    async def coro_dual_group_coherence(
        self,
        sig1a: ndarray,
        sig1b: ndarray,
        sig2a: ndarray,
        sig2b: ndarray,
        fs: float,
        percentile: Optional[float],
        on_progress: Callable[[int, int], None],
        *args,
        **kwargs,
    ) -> List[Tuple]:
        """
        Calculates dual group coherence.

        Parameters
        ----------
        sig1a : ndarray
            The set of signals A for group 1.
        sig1b : ndarray
            The set of signals B for group 1.
        sig2a : ndarray
            The set of signals A for group 2.
        sig2b : ndarray
            The set of signals B for group 2.
        fs : float
            The sampling frequency of the signals.
        percentile : Optional[float]
            The percentile at which the surrogates will be subtracted.
        on_progress : Callable
            Function called to report progress.
        args
            Arguments to pass to the wavelet transform.
        kwargs
            Keyword arguments to pass to the wavelet transform.

        Returns
        -------
        freq : ndarray
            [1D array] The frequencies.
        coh1 : ndarray
            [2D array] The residual coherence for group 1.
        coh2 : ndarray
            [2D array] The residual coherence for group 2.
        surr1 : ndarray
            [3D array] The surrogates for group 1.
        surr2 : ndarray
            [3D array] The surrogates for group 2.
        """
        self.stop()
        self.scheduler = Scheduler(
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        return await self.scheduler.map(
            target=functools.partial(
                pymodalib.dual_group_coherence,
                sig1a,
                sig1b,
                sig2a,
                sig2b,
                fs,
                percentile,
                *args,
                **kwargs,
            ),
            args=[tuple()],
        )

    async def coro_statistical_test(
        self,
        freq: ndarray,
        coh1: ndarray,
        coh2: ndarray,
        bands: List[Tuple[float, float]],
        on_progress: Callable[[int, int], None],
    ) -> Dict[Tuple[float, float], float]:
        """
        Performs a statistical test on the results of group phase coherence,
        to check for significance.

        Parameters
        ----------
        freq : ndarray
            [1D array] The frequencies from group coherence.
        coh1 : ndarray
            [2D array] The coherence of the first group.
        coh2 : ndarray
            [2D array] The coherence of the second group.
        bands : List[Tuple[float, float]]
            List containing the frequency bands which will be tested for significance.
        on_progress : Callable
            Function called to report progress.

        Returns
        -------
        pvalues : Dict[Tuple[float, float], float]
            A dictionary mapping each frequency band to its p-value.
        """
        self.stop()
        self.scheduler = Scheduler(
            run_in_thread=self.should_run_in_thread,
            progress_callback=on_progress,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        from pymodalib.algorithms.group_coherence import statistical_test

        results = (
            await self.scheduler.map(
                target=statistical_test,
                args=[(freq, coh1, coh2, bands)],
            )
        )[0]

        return dict(zip(bands, results))

    async def coro_preprocess(
        self,
        signals: Union[TimeSeries, List[TimeSeries]],
        fmin: float,
        fmax: float,
    ) -> List[ndarray]:
        """
        Performs pre-processing on one or more signals.

        :param signals: the signal or signals to perform pre-processing on
        :param fmin: the minimum frequency
        :param fmax: the maximum frequency
        :return: list containing the output from each process
        """
        self.stop()
        self.scheduler = Scheduler(
            run_in_thread=True,
            raise_exceptions=True,
            capture_stdout=True,
            only_threads=self.only_threads,
        )

        if isinstance(signals, TimeSeries):
            signals = [signals]

        args = [(s.signal, s.frequency, fmin, fmax) for s in signals]

        return await self.scheduler.map(
            target=pymodalib.preprocess,
            args=args,
            process_type=mp.Process,
            queue_type=mp.Queue,
        )

    def stop(self):
        """
        Stops the tasks in progress. The MPHandler instance can be reused.
        """
        if self.scheduler:
            self.scheduler.terminate()
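
# Hedged usage sketch: the class docstring requires keeping a reference to the
# MPHandler so it is not garbage collected while tasks run. `TimeFrequencyPresenterSketch`
# and its `calculate` method are illustrative placeholders, not PyMODA classes.
class TimeFrequencyPresenterSketch:
    def __init__(self) -> None:
        self.mp_handler = MPHandler()  # Reference kept for the lifetime of the presenter.

    async def calculate(
        self, params: TFParams, on_progress: Callable[[int, int], None]
    ) -> List[Tuple]:
        # Calling another coroutine on the same handler would stop this task first.
        return await self.mp_handler.coro_transform(params, on_progress)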