def stop(self):
    """Ask the parsing process to terminate.

    Sets the internal exit event; the run() loop observes it and
    finishes on its next iteration.
    :return:
    """
    Log.d(TAG, "Process finishing...")
    self._exit.set()
def stop(self):
    """Stop acquiring data and release the socket.

    Closes the websocket client, then raises the exit flag so the
    acquisition loop in run() terminates.
    :return:
    """
    Log.i(TAG, "Process finishing...")
    self._socket_client.close()
    self._exit.set()
def _update_sample_size(self):
    """Apply the sample size chosen in the sample Spin Box.

    Connected to the valueChanged signal of the sample Spin Box;
    resizes the worker's internal buffers when a worker exists.
    :return:
    """
    if self.worker is None:
        return
    Log.i(TAG, "Changing sample size")
    self.worker.reset_buffers(self.ui.sBox_Samples.value())
def closeEvent(self, evnt):
    """
    Overrides the QTCloseEvent.
    This function is connected to the clicked signal of the close
    button of the window; a still-running capture is stopped before
    the window goes away.
    :param evnt: QT evnt.
    :return:
    """
    # Guard against closing before any capture was started: the sibling
    # _update_sample_size() shows self.worker can legitimately be None,
    # and calling is_running() on None would raise AttributeError.
    if self.worker is not None and self.worker.is_running():
        Log.i(TAG, "Window closed without stopping capture, stopping it")
        self.stop()
def stop(self):
    """Halt the ongoing acquisition.

    Connected to the clicked signal of the Stop button: stops the
    plot refresh timer, re-enables the configuration UI, and signals
    the worker to finish.
    :return:
    """
    Log.i(TAG, "Clicked stop")
    self._timer_plot.stop()
    self._enable_ui(True)
    self.worker.stop()
def set_user_log_level(self):
    """Apply the log level requested by the user.

    Delegates to _parse_log_level() when a parser is available;
    otherwise logs a warning and returns None.
    :return:
    """
    if self._parser is None:
        Log.w(TAG, "Parser was not created !")
        return None
    self._parse_log_level()
def open(self, port='ger', speed='million', timeout=0.01):
    """Open the websocket connection to the acquisition device.

    :param port: Unused; kept for interface compatibility.
    :param speed: Unused; kept for interface compatibility.
    :param timeout: Socket timeout in seconds.
    :type timeout: float.
    :return: True when the connection was established, False otherwise.
    :rtype: bool.
    """
    try:
        # Honor the timeout argument instead of silently ignoring it.
        self._socket_client.settimeout(timeout)
        # NOTE(review): endpoint is hard-coded; port/speed are ignored.
        self._socket_client.connect("ws://192.168.4.1:81/")
        Log.i(TAG, "Socket open")
        return True
    except (TimeoutError, OSError, websocket.WebSocketException) as e:
        # A refused or unreachable host raises OSError subclasses, and
        # the websocket library raises its own exceptions — catching
        # only TimeoutError let those escape and crash the caller.
        Log.w(TAG, "Error opening socket: {}".format(e))
        return False
def start(self):
    """Begin acquiring from the configured source.

    Connected to the clicked signal of the Start button: builds a
    fresh Worker from the current UI settings and, when it starts
    successfully, kicks off the periodic plot refresh and locks the
    configuration UI.
    :return:
    """
    Log.i(TAG, "Clicked start")
    sample_count = self.ui.sBox_Samples.value()
    export = self.ui.chBox_export.isChecked()
    self.worker = Worker(samples=sample_count, export_enabled=export)
    if self.worker.start():
        self._timer_plot.start(Constants.plot_update_ms)
        self._enable_ui(False)
def run(self):
    """Main loop of the parser process.

    Monitors the internal buffer to parse raw data and distribute it
    to graph and storage, sleeping between passes, until the exit
    event is set; a final drain then removes any remaining data.
    :return:
    """
    Log.d(TAG, "Process starting...")
    while True:
        if self._exit.is_set():
            break
        self._consume_queue()
        sleep(self._consumer_timeout)
    # One last pass to completely empty the queue after exit was signaled.
    self._consume_queue()
    Log.d(TAG, "Process finished")
def reset_buffers(self, samples):
    """Recreate the internal buffers with a new capacity.

    :param samples: Number of samples for the buffers.
    :type samples: int.
    :return:
    """
    # One data buffer per configured plot color, plus a time axis buffer.
    self._data_buffers = [RingBuffer(samples) for _ in Constants.plot_colors]
    self._time_buffer = RingBuffer(samples)
    # Drain anything still pending so stale data cannot leak into the
    # fresh buffers.
    while not self._queue.empty():
        self._queue.get()
    Log.i(TAG, "Buffers cleared")
def run(self):
    # Application entry point: only proceeds when the interpreter meets
    # the minimum required version; otherwise reports the failure.
    if Architecture.is_python_version(MinimalPython.major, minor=MinimalPython.minor):
        Log.i(TAG, "Starting SocScrollSave")
        # Main window sized from the user-specified sample count.
        win = mainWindow.MainWindow(samples=self._args.get_user_samples())
        win.setWindowTitle("{} - {}".format(Constants.app_title, Constants.app_version))
        win.show()
        # Blocks here until the Qt event loop terminates.
        self._app.exec()
        Log.i(TAG, "Finishing SocScrollSave\n")
        win.close()
    else:
        self._fail()
    # NOTE(review): original formatting is ambiguous about whether
    # close() runs on both paths; it is assumed to (it exits the Qt
    # app, closes the logger and terminates the process) — confirm.
    self.close()
def __init__(self, parser_process):
    """Build the acquisition process around a websocket client.

    :param parser_process: Reference to a ParserProcess instance.
    :type parser_process: ParserProcess
    """
    multiprocessing.Process.__init__(self)
    self._parser = parser_process
    self._exit = multiprocessing.Event()
    self._socket_client = websocket.WebSocket()
    Log.i(TAG, "Process Ready")
def run(self):
    """Main loop of the CSV export process.

    Creates the CSV writer over the already opened file, then drains
    the store queue until the exit event is set; a final drain makes
    sure nothing queued right before exit is dropped, and the file is
    closed last.
    :return:
    """
    Log.i(TAG, "Process starting...")
    self._csv = csv.writer(self._file,
                           delimiter=Constants.csv_delimiter,
                           quoting=csv.QUOTE_MINIMAL)
    while not self._exit.is_set():
        self._consume_queue()
        sleep(self._timeout)
    # Final pass so data queued just before the stop call is still written.
    self._consume_queue()
    Log.i(TAG, "Process finished")
    self._file.close()
def _create_file(filename, path=None, extension=Constants.csv_extension):
    """
    Creates the file to export the data.
    :param filename: Name of the file where data will be exported.
    :type filename: str.
    :param path: Path where data file will be saved.
    :type path: str.
    :param extension: Extension to give to the export file.
    :type extension: str.
    :return: Reference to the export file.
    """
    FileManager.create_dir(path)
    full_path = FileManager.create_file(filename, extension=extension, path=path)
    # Only open the file when it does not already exist, so a previous
    # export is never appended to or overwritten here.
    if not FileManager.file_exists(full_path):
        Log.i(TAG, "Storing in {}".format(full_path))
        return open(full_path, "a", newline='')
    # NOTE(review): returns None when the file already exists; callers
    # appear to store the result as self._file — confirm they tolerate
    # None before the CSV writer is created over it.
    return None
def run(self):
    """
    Reads the socket until a stop call is made.
    Each received payload is forwarded to the parser together with a
    timestamp relative to the start of acquisition.
    :return:
    """
    Log.i(TAG, "Process starting...")
    timestamp = time()
    while not self._exit.is_set():
        stamp = time() - timestamp
        try:
            data = self._socket_client.recv()
            if len(data) > 0:
                self._parser.add([stamp, data])
        except Exception:
            # The original bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt, and reported via print(); catch only
            # Exception and report through the application logger while
            # keeping the best-effort behavior (loop continues).
            Log.w(TAG, "Error receiving data from socket")
    Log.i(TAG, "Process finished")
def _parse_csv(self, time, line):
    """
    Parses incoming data and distributes to external processes.
    :param time: Timestamp.
    :type time: float.
    :param line: Raw data coming from acquisition process.
    :type line: basestring.
    :return:
    """
    # Nothing to do for empty payloads.
    if len(line) == 0:
        return
    try:
        # isinstance is the idiomatic and subclass-safe type check
        # (replaces the `type(x) == T` anti-pattern).
        if isinstance(line, bytes):
            values = line.decode("UTF-8").split(self._split)
        elif isinstance(line, str):
            values = line.split(self._split)
        else:
            raise TypeError
        values = [float(v) for v in values]
        Log.d(TAG, values)
        self._out_queue.put((time, values))
        # Forward to the CSV exporter only when one was attached.
        if self._store_reference is not None:
            self._store_reference.add(time, values)
    except ValueError:
        Log.w(TAG, "Can't convert to float. Raw: {}".format(line.strip()))
    except AttributeError:
        Log.w(
            TAG,
            "Attribute error on type ({}). Raw: {}".format(
                type(line), line.strip()))
def __init__(self, filename=None, path=None, timeout=0.5):
    """Prepare the CSV export process and its target file.

    If filename is not specified, a default name based on time will
    be used.
    :param filename: Name of the file where data will be exported.
    :type filename: str.
    :param path: Path where data file will be saved.
    :type path: str.
    :param timeout: Time to wait after emptying the internal buffer
     before next write.
    :type timeout: float.
    """
    multiprocessing.Process.__init__(self)
    self._exit = multiprocessing.Event()
    self._store_queue = multiprocessing.Queue()
    self._timeout = timeout
    self._csv = None
    # Fall back to a time-stamped default export name.
    if filename is None:
        filename = strftime(Constants.csv_default_filename, gmtime())
    self._file = self._create_file(filename, path=path)
    Log.i(TAG, "Process ready")
def __init__(self, data_queue, store_reference=None,
             split=Constants.csv_delimiter,
             consumer_timeout=Constants.parser_timeout_ms):
    """Set up the parser process and its queues.

    :param data_queue: Reference to Queue where processed data will be put.
    :type data_queue: multiprocessing Queue.
    :param store_reference: Reference to CSVProcess instance, if needed.
    :type store_reference: CSVProcess (multiprocessing.Process)
    :param split: Delimiter in incoming data.
    :type split: str.
    :param consumer_timeout: Time to wait after emptying the internal
     buffer before next parsing.
    :type consumer_timeout: float.
    """
    multiprocessing.Process.__init__(self)
    self._exit = multiprocessing.Event()
    self._in_queue = multiprocessing.Queue()
    self._out_queue = data_queue
    self._split = split
    self._consumer_timeout = consumer_timeout
    self._store_reference = store_reference
    Log.d(TAG, "Process ready")
def close(self):
    """Shut the application down.

    Exits the Qt event loop, closes the logger, and terminates the
    interpreter via sys.exit().
    """
    self._app.exit()
    Log.close()
    sys.exit()
def _fail():
    """Log that the interpreter does not meet the minimum Python version.

    Called when the version check in run() fails.
    """
    # .format() already returns a str; the original str(...) wrapper
    # around it was redundant.
    txt = "SocScrollSave requires Python {}.{} to run".format(
        MinimalPython.major, MinimalPython.minor)
    Log.e(TAG, txt)