Example #1
    def __init__(self,
                 device_name,
                 process_name,
                 exception_queue,
                 command_queue,
                 valid_commands=None):
        """Initialize SwitchboardProcess with the arguments provided.

        Args:
          device_name (str): name of the device, used in exception error
            messages.
          process_name (str): name to use for the process and in exception
            error messages.
          exception_queue (Queue): queue used to report exception traceback
            messages from the subprocess.
          command_queue (Queue): queue from which commands are received.
          valid_commands (Optional[Tuple[str, ...]]): valid command strings.
        """
        self.device_name = device_name
        self.process_name = process_name
        self._command_queue = command_queue
        self._exception_queue = exception_queue
        gdm_logger.switch_to_multiprocess_logging()
        self.logging_queue = gdm_logger.get_logging_queue()
        self._start_event = multiprocessing_utils.get_context().Event()
        self._stop_event = multiprocessing_utils.get_context().Event()
        self._terminate_event = multiprocessing_utils.get_context().Event()
        self._valid_commands = valid_commands or ()
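The examples below exercise this class from unit tests. As a rough sketch (an assumption pieced together from Example #20 and the ErrorProcess helper at the end of this listing, not taken verbatim from the source), a concrete subclass mainly overrides _do_work(), which the process loop calls repeatedly until it returns False or the process is stopped:

from gazoo_device.switchboard import switchboard_process


class EchoProcess(switchboard_process.SwitchboardProcess):
    """Hypothetical subclass used only to illustrate the _do_work() contract."""

    def _do_work(self):
        # get_message() returns None when the command queue is empty.
        message = switchboard_process.get_message(self._command_queue, timeout=0)
        if message is not None:
            print(message)  # Echo any command received from the parent process.
        return True  # Returning False would end the process loop.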
Example #2
    def setUp(self):
        super().setUp()

        self.command_queue = multiprocessing_utils.get_context().Queue()
        self.log_queue = multiprocessing_utils.get_context().Queue()
        self.raw_data_queue = multiprocessing_utils.get_context().Queue()
        self.call_result_queue = multiprocessing_utils.get_context().Queue()
Example #3
    def __init__(self,
                 device_name,
                 exception_queue,
                 command_queue,
                 log_queue,
                 transport,
                 call_result_queue,
                 raw_data_queue=None,
                 raw_data_id=0,
                 framer=None,
                 partial_line_timeout=PARTIAL_LINE_TIMEOUT,
                 read_timeout=_READ_TIMEOUT,
                 max_read_bytes=_MAX_READ_BYTES,
                 max_write_bytes=_MAX_WRITE_BYTES):
        """Initialize TransportProcess with the arguments provided.

        Args:
          device_name (str): name of the device using this transport.
          exception_queue (Queue): queue used to report exception traceback
            messages from the subprocess.
          command_queue (Queue): queue from which commands are received.
          log_queue (Queue): queue to which each log line is written with a
            host timestamp added.
          transport (Transport): transport used to receive and send raw data.
          call_result_queue (Queue): queue to which transport call responses
            are written.
          raw_data_queue (Queue): queue to put raw (if applicable, detokenized)
            data into when enabled.
          raw_data_id (int): unique identifier for data published by this
            transport process to the raw_data_queue.
          framer (DataFramer): framer used to frame raw data into partial and
            complete lines.
          partial_line_timeout (float): time in seconds to wait before adding
            partial lines to raw_data_queue and log_queue.
          read_timeout (float): time in seconds to wait for transport reads.
          max_read_bytes (int): maximum number of bytes to attempt to read on
            each transport read call.
          max_write_bytes (int): maximum number of bytes to attempt to write on
            each transport write call.
        """
        process_name = "{}-Transport{}".format(device_name, raw_data_id)
        super().__init__(device_name,
                         process_name,
                         exception_queue,
                         command_queue,
                         valid_commands=_ALL_VALID_COMMANDS)
        self._buffered_unicode = ""
        self._framer = framer or data_framer.NewlineFramer()
        self._log_queue = log_queue
        self._max_read_bytes = max_read_bytes
        self._max_write_bytes = max_write_bytes
        self._partial_line_timeout = partial_line_timeout
        self._partial_log_time = time.time()
        self._pending_writes = None
        self._raw_data_enabled = multiprocessing_utils.get_context().Event()
        self._call_result_queue = call_result_queue
        self._raw_data_id = raw_data_id
        self._raw_data_queue = raw_data_queue
        self._read_timeout = read_timeout
        self._transport_open = multiprocessing_utils.get_context().Event()
        self.transport = transport
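A construction sketch for wiring up a TransportProcess (an assumption: my_transport stands in for any concrete Transport instance and is not defined in these excerpts; the import paths are inferred from the package layout shown in the tests):

from gazoo_device.switchboard import data_framer
from gazoo_device.switchboard import transport_process
from gazoo_device.utility import multiprocessing_utils

command_queue = multiprocessing_utils.get_context().Queue()
exception_queue = multiprocessing_utils.get_context().Queue()
log_queue = multiprocessing_utils.get_context().Queue()
call_result_queue = multiprocessing_utils.get_context().Queue()
raw_data_queue = multiprocessing_utils.get_context().Queue()

transport_proc = transport_process.TransportProcess(
    "my_device",
    exception_queue,
    command_queue,
    log_queue,
    my_transport,  # Hypothetical Transport instance.
    call_result_queue=call_result_queue,
    raw_data_queue=raw_data_queue,
    framer=data_framer.NewlineFramer())
transport_proc.start()  # start() is inherited from SwitchboardProcess.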
Example #4
 def test_switchboard_process(self):
     """Test starting a Switchboard child process."""
     command_queue = multiprocessing_utils.get_context().Queue()
     exception_queue = multiprocessing_utils.get_context().Queue()
     proc = switchboard_process.SwitchboardProcess("some_device",
                                                   "some_process",
                                                   exception_queue,
                                                   command_queue)
     proc.start()
     proc.stop()
Example #5
 def __init__(self,
              baudrate=115200,
              generate_lines=False,
              generate_raw_log_lines=None,
              fail_open=False,
              fail_read=False,
              failure_message=EXCEPTION_MESSAGE,
              write_read_func=None,
              open_on_start=True,
              read_only_if_raw_data_queue_enabled=False):
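     """Initializes the fake transport with the given test behaviors."""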
     self.comms_address = "/some/serial/path"
     self.bytes_per_second = multiprocessing_utils.get_context().Value(
         "f", 0.0)
     self.is_open_count = multiprocessing_utils.get_context().Value("i", 0)
     self.open_count = multiprocessing_utils.get_context().Value("i", 0)
     self.close_count = multiprocessing_utils.get_context().Value("i", 0)
     self.read_size = multiprocessing_utils.get_context().Value("i", 0)
     self.reads = multiprocessing_utils.get_context().Queue()
     self.writes = multiprocessing_utils.get_context().Queue()
     self._baudrate = baudrate
     self._exit_flag = multiprocessing_utils.get_context().Event()
     self._fail_open = fail_open
     self._fail_read = fail_read
     self._generate_lines = generate_lines
     self._generate_raw_log_lines = generate_raw_log_lines
     self._properties = {}
     self._failure_message = failure_message
     self._transport_open = multiprocessing_utils.get_context().Event()
     self._write_read_func = write_read_func
     self._properties[transport_properties.OPEN_ON_START] = open_on_start
     # Note: if using read_only_if_raw_data_queue_enabled flag, your test must
     # call bind_raw_data_enabled_method().
     self._read_only_if_raw_data_queue_enabled = read_only_if_raw_data_queue_enabled
     self._raw_data_queue_enabled_method = None
Example #6
 def test_simple_process(self):
     """Test spawning a multiprocessing.Process() child process."""
     logging_queue = gdm_logger.get_logging_queue()
     proc = multiprocessing_utils.get_context().Process(
         target=target, args=(logging_queue, ))
     proc.start()
     proc.join()
Example #7
  def test_process_wrapper_exception_call(
      self, mock_manager_class, mock_import, mock_register, mock_get_logger,
      mock_initialize_logging):
    """Tests _process_wrapper for a process where function raises an error."""
    mock_manager = mock_manager_class.return_value
    mock_logger = mock_get_logger.return_value
    multiprocessing_queue = multiprocessing_utils.get_context().Queue()
    return_queue = mock.MagicMock(spec=multiprocessing_queue)
    error_queue = mock.MagicMock(spec=multiprocessing_queue)
    process_id = "1"
    mock_function = mock.MagicMock()
    mock_function.__name__ = "mock_function"
    mock_function.side_effect = RuntimeError("Something went wrong")
    args = (1, 2)
    kwargs = {"foo": "bar"}
    parallel_utils._process_wrapper(
        return_queue=return_queue,
        error_queue=error_queue,
        logging_queue=mock.MagicMock(spec=multiprocessing_queue),
        process_id=process_id,
        # "foo.package" imports but fails registration.
        # "bar.package" fails to import.
        extension_package_import_paths=["foo.package", "bar.package"],
        call_spec=parallel_utils.CallSpec(mock_function, *args, **kwargs))

    mock_import.assert_has_calls(
        [mock.call("foo.package"), mock.call("bar.package")])
    mock_register.assert_called_once()
    mock_manager_class.assert_called_once()
    mock_function.assert_called_once_with(mock_manager, *args, **kwargs)
    mock_logger.warning.assert_called()
    return_queue.put.assert_not_called()
    error_queue.put.assert_called_once_with(
        (process_id, (RuntimeError.__name__, "Something went wrong")))
    mock_manager.close.assert_called_once()
Example #8
 def test_001_transport_enqueue_command_writes_below_split(self):
     """Test _enqueue_command_writes can split commands below max write limit."""
     write_queue = multiprocessing_utils.get_context().Queue()
     command = "short command"
     transport_process._enqueue_command_writes(write_queue, command)
     wait_for_queue_writes(write_queue)
     self._verify_command_split(command, write_queue)
Example #9
    def test_205_transport_auto_reopen_unexpected_close(self):
        """Test transport process reopens if it closes unexpectedly."""
        transport = mock.MagicMock(spec=fake_transport.FakeTransport)
        transport._properties = {}
        transport._properties[transport_properties.AUTO_REOPEN] = True
        transport._transport_open = mock.MagicMock(
            spec=multiprocessing_utils.get_context().Event())
        transport.is_open.side_effect = iter([False])
        self.uut = transport_process.TransportProcess(
            "fake_transport",
            self.exception_queue,
            self.command_queue,
            self.log_queue,
            transport,
            call_result_queue=self.call_result_queue)

        self.uut._pre_run_hook()
        transport.open.assert_called_once()

        self.uut._do_work()  # Should reopen here
        self.assertEqual(
            2, transport.open.call_count,
            "Expected transport.open to be called {} times, found {}".format(
                2, transport.open.call_count))
        # If transport closes unexpectedly, transport.close() should be called
        # before re-opening the transport to clean up resources (b/183527797).
        transport.close.assert_called_once()

        self.uut._post_run_hook()
        self.assertEqual(transport.close.call_count, 2)
Example #10
    def test_204_transport_auto_reopen_with_close(self):
        """Transport process shouldn't reopen after being closed via close()."""
        transport = mock.MagicMock(spec=fake_transport.FakeTransport)
        transport._properties = {}
        transport._properties[transport_properties.AUTO_REOPEN] = True
        transport._transport_open = mock.MagicMock(
            spec=multiprocessing_utils.get_context().Event())
        transport.is_open.side_effect = iter([False, False])
        self.uut = transport_process.TransportProcess(
            "fake_transport",
            self.exception_queue,
            self.command_queue,
            self.log_queue,
            transport,
            call_result_queue=self.call_result_queue)

        self.uut._pre_run_hook()
        transport.open.assert_called_once()

        self.command_queue.put((transport_process.CMD_TRANSPORT_CLOSE, None))
        wait_for_queue_writes(self.command_queue)
        self.uut._do_work()
        transport.close.assert_called()

        self.uut._do_work()  # Shouldn't reopen here

        self.uut._post_run_hook()
        transport.open.assert_called_once()
Example #11
    def start(self, wait_for_start: bool = True) -> None:
        """Starts the process.

        Args:
          wait_for_start: Whether to wait for the process to start. If False,
            the caller is responsible for calling wait_for_start() separately.

        Raises:
          RuntimeError: if the process has already been started or fails to
            start.
        """
        if not self.is_started():
            self._start_event.clear()
            self._stop_event.clear()
            parent_pid = os.getpid()
            process = multiprocessing_utils.get_context().Process(
                name=self.process_name,
                target=_process_loop,
                args=(self, parent_pid))
            process.start()
            if wait_for_start:
                self.wait_for_start()
            self._process = process
        else:
            raise RuntimeError("Device {} failed to start child process {}. "
                               "Child process is already running.".format(
                                   self.device_name, self.process_name))
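A short usage sketch of the deferred-start path described in the docstring (an assumption based on Example #4; the queues are created the same way as there):

command_queue = multiprocessing_utils.get_context().Queue()
exception_queue = multiprocessing_utils.get_context().Queue()
proc = switchboard_process.SwitchboardProcess("some_device",
                                              "some_process",
                                              exception_queue,
                                              command_queue)
proc.start(wait_for_start=False)  # Returns without waiting for the child.
# ... perform other setup work here ...
proc.wait_for_start()  # Blocks until the child signals the start event.
proc.stop()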
Example #12
  def test_process_wrapper_successful_call(
      self, mock_manager_class, mock_import, mock_register, mock_get_logger,
      mock_initialize_logging):
    """Tests _process_wrapper for a process where there are no errors."""
    mock_manager = mock_manager_class.return_value
    mock_logger = mock_get_logger.return_value
    multiprocessing_queue = multiprocessing_utils.get_context().Queue()
    return_queue = mock.MagicMock(spec=multiprocessing_queue)
    error_queue = mock.MagicMock(spec=multiprocessing_queue)
    logging_queue = mock.MagicMock(spec=multiprocessing_queue)
    process_id = "1"
    mock_function = mock.MagicMock()
    mock_function.__name__ = "mock_function"
    args = (1, 2)
    kwargs = {"foo": "bar"}
    parallel_utils._process_wrapper(
        return_queue=return_queue,
        error_queue=error_queue,
        logging_queue=logging_queue,
        process_id=process_id,
        extension_package_import_paths=["foo.package", "bar.package"],
        call_spec=parallel_utils.CallSpec(mock_function, *args, **kwargs))

    mock_initialize_logging.assert_called_once_with(logging_queue)
    mock_get_logger.assert_called_once()
    mock_logger.debug.assert_called()
    mock_import.assert_has_calls(
        [mock.call("foo.package"), mock.call("bar.package")])
    self.assertEqual(mock_register.call_count, 2)
    mock_manager_class.assert_called_once()
    mock_function.assert_called_once_with(mock_manager, *args, **kwargs)
    return_queue.put.assert_called_once_with(
        (process_id, mock_function.return_value))
    error_queue.put.assert_not_called()
    mock_manager.close.assert_called_once()
Example #13
 def test_012_switchboard_process_put_message_full_queue(self):
     """Test putting a message into a full queue."""
     out_queue = multiprocessing_utils.get_context().Queue(maxsize=1)
     switchboard_process.put_message(out_queue, _ECHO_MESSAGE)
     with self.assertRaises(queue.Full):
         switchboard_process.put_message(out_queue,
                                         _ECHO_MESSAGE,
                                         timeout=0)
Example #14
 def test_013_switchboard_process_put_message_closed_queue(self):
     """Test putting a message into a closed queue raises an error."""
     out_queue = multiprocessing_utils.get_context().Queue()
     out_queue.close()
     with self.assertRaisesRegex((AssertionError, ValueError),
                                 "Queue.*closed"):
         switchboard_process.put_message(out_queue,
                                         _ECHO_MESSAGE,
                                         timeout=0)
Example #15
 def test_002_transport_enqueue_command_writes_above_split(self):
     """Test _enqueue_command_writes splits commands below max write limit."""
     write_queue = multiprocessing_utils.get_context().Queue()
     command = (
         "this will be a really long command that exceeds the 32 byte "
         "limit")
     transport_process._enqueue_command_writes(write_queue, command)
     wait_for_queue_writes(write_queue)
     self._verify_command_split(command, write_queue)
Example #16
    def set_open(self):
        self._transport_open.set()
        if self._generate_lines:
            self._generator = multiprocessing_utils.get_context().Process(
                target=_produce_data,
                args=(self._baudrate / 10, self.bytes_per_second,
                      self._exit_flag, self._generate_raw_log_lines,
                      self.reads))
            self._generator.daemon = True
            self._generator.start()
Example #17
 def test_004_switchboard_process_get_message_nonempty_queue(self):
     """Test getting a message from a nonempty queue."""
     in_queue = multiprocessing_utils.get_context().Queue()
     in_queue.put(_ECHO_MESSAGE)
     wait_for_queue_writes(in_queue)
     message = switchboard_process.get_message(in_queue, timeout=0)
     self.assertEqual(
         _ECHO_MESSAGE, message,
         "Expected {} from nonempty queue found {}".format(
             _ECHO_MESSAGE, message))
Example #18
 def test_get_message_from_queue(self):
     """Test a Switchboard process getting a message from a queue."""
     echo_message = "Message to be echoed"
     in_queue = multiprocessing_utils.get_context().Queue()
     in_queue.put(echo_message)
     switchboard_process.wait_for_queue_writes(in_queue)
     message = switchboard_process.get_message(in_queue, timeout=0)
     self.assertEqual(
         echo_message, message,
         "Expected {} from nonempty queue found {}".format(
             echo_message, message))
Example #19
    def test_031_switchboard_process_wrapper_skips_exception(
            self, mock_psutil_proc):
        """Test child_process_wrapper skips exception_queue message on EOFError."""
        error_queue = multiprocessing_utils.get_context().Queue()

        with switchboard_process._child_process_wrapper(
                1234, "fake_process", "fake_device", error_queue):
            raise EOFError("end of file")
        wait_for_queue_writes(error_queue, timeout=0.5)
        self.assertTrue(
            error_queue.empty(),
            "Expected exception queue to be empty, but found contents")
Example #20
    def test_switchboard_process_loop(self, mock_psutil_proc):
        """Process loop calls _do_work at least once."""
        logging_queue = multiprocessing_utils.get_context().Queue()
        exception_queue = multiprocessing_utils.get_context().Queue()
        mock_start_event = mock.MagicMock(
            spec=multiprocessing_utils.get_context().Event())
        mock_stop_event = mock.MagicMock(
            spec=multiprocessing_utils.get_context().Event())
        mock_terminate_event = mock.MagicMock(
            spec=multiprocessing_utils.get_context().Event())

        mock_switchboard_process = mock.MagicMock(
            spec=switchboard_process.SwitchboardProcess)
        mock_switchboard_process.device_name = "mock_device"
        mock_switchboard_process.process_name = "mock_process"
        mock_switchboard_process.logging_queue = logging_queue
        mock_switchboard_process._exception_queue = exception_queue
        mock_switchboard_process._start_event = mock_start_event
        mock_switchboard_process._stop_event = mock_stop_event
        mock_switchboard_process._terminate_event = mock_terminate_event

        mock_parent_proc = mock_psutil_proc.return_value
        mock_switchboard_process._pre_run_hook.return_value = True
        mock_terminate_event.is_set.return_value = False
        mock_switchboard_process._do_work.return_value = False

        with mock.patch.object(
                gdm_logger,
                "initialize_child_process_logging") as mock_init_logging:
            switchboard_process._process_loop(mock_switchboard_process, 1234)

        mock_init_logging.assert_called_once()
        mock_start_event.set.assert_called_once()
        mock_switchboard_process._pre_run_hook.assert_called_once()
        mock_parent_proc.status.assert_called_once()
        mock_terminate_event.is_set.assert_called_once()
        mock_terminate_event.clear.assert_not_called()
        mock_switchboard_process._do_work.assert_called_once()
        mock_stop_event.set.assert_called_once()
Example #21
    def test_030_switchboard_process_wrapper_puts_exception(
            self, mock_psutil_proc):
        """Test child_process_wrapper puts message in exception_queue."""
        error_queue = multiprocessing_utils.get_context().Queue()

        with switchboard_process._child_process_wrapper(
                1234, "fake_process", "fake_device", error_queue):
            raise Exception("my_exception")
        wait_for_queue_writes(error_queue)
        message = error_queue.get_nowait()
        self.assertIn(
            "Device fake_device raised exception in fake_process", message,
            "Expected 'Device ... raised exception in ...' found {!r}".format(
                message))
Example #22
    def setUp(self):
        super().setUp()

        with mock.patch.object(multiprocessing_utils.get_context(), "Queue"):
            self.uut = fire_manager.FireManager(debug=False,
                                                dev_debug=False,
                                                quiet=False)
        self.mock_switchboard = mock.MagicMock(
            spec=switchboard.SwitchboardDefault)
        self.mock_switchboard.device_name = "FakeDevice"
        self.mock_switchboard.button_list = []
        self.uut.create_switchboard = mock.MagicMock(
            return_value=self.mock_switchboard)
        self.uut.reload_configuration(  # Load the mock device configuration files
            device_file_name=self.files["device_file_name"],
            options_file_name=self.files["device_options_file_name"],
            testbeds_file_name=self.files["testbeds_file_name"],
            gdm_config_file_name=self.files["gdm_config_file_name"],
            log_directory=self.artifacts_directory)
Example #23
def switch_to_multiprocess_logging() -> None:
    """Initializes multiprocessing logging in the main process."""
    if is_multiprocess_logging_enabled():
        return

    global _logging_queue
    global _logging_thread
    _logging_queue = multiprocessing_utils.get_context().Queue()
    _logging_queue.cancel_join_thread()
    _logging_thread = multiprocess_logging.LoggingThread(_logging_queue)
    queue_handler = multiprocess_logging.QueueHandler(_logging_queue)
    queue_handler.setLevel(logging.DEBUG)

    logger = get_logger()
    for handler in logger.handlers:  # Transfer log handlers to queue handler.
        _logging_thread.add_handler(handler)
    logger.handlers = [queue_handler]

    _logging_thread.start()
    atexit.register(common_utils.MethodWeakRef(_logging_thread.stop))
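The consumer side of this queue, pieced together from Examples #6 and #20 (a sketch under the assumption that gdm_logger.initialize_child_process_logging() takes the shared logging queue, as the mocks in Example #20 suggest):

from gazoo_device import gdm_logger
from gazoo_device.utility import multiprocessing_utils


def child_target(logging_queue):
    # Assumption: child processes attach to the shared queue before logging.
    gdm_logger.initialize_child_process_logging(logging_queue)
    gdm_logger.get_logger().info("Hello from the child process")


gdm_logger.switch_to_multiprocess_logging()
logging_queue = gdm_logger.get_logging_queue()
proc = multiprocessing_utils.get_context().Process(
    target=child_target, args=(logging_queue,))
proc.start()
proc.join()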
Example #24
 def test_103_switchboard_process_two_running_processes(self):
     """Test two started running processes."""
     uut1 = RunningProcess("fake_device", "echo_process1",
                           self.exception_queue, self.command_queue)
     command_queue2 = multiprocessing_utils.get_context().Queue()
     uut2 = RunningProcess("fake_device", "echo_process2",
                           self.exception_queue, command_queue2)
     uut1.start()
     self.assertTrue(uut1.is_running(),
                     "Expected process1 to be running, found !running")
     uut2.start()
     self.assertTrue(uut2.is_running(),
                     "Expected process2 to be running, found !running")
     uut1.stop()
     self.assertFalse(uut1.is_running(),
                      "Expected process1 to be not running, found running")
     uut2.stop()
     self.assertFalse(uut2.is_running(),
                      "Expected process2 to be not running, found running")
     del uut1
     del uut2
Example #25
def execute_concurrently(
    call_specs: Sequence[CallSpec],
    timeout=TIMEOUT_PROCESS,
    raise_on_process_error=True
) -> Tuple[List[Any], List[Optional[Tuple[str, str]]]]:
    """Concurrently executes function calls in parallel processes.

    Args:
      call_specs: Specifications for each of the parallel executions.
      timeout: Time to wait before terminating all of the parallel processes.
      raise_on_process_error: If True, raise an error if any of the parallel
        processes encounters an error. If False, return a list of errors which
        occurred in the parallel processes along with the received results.

    Returns:
      A tuple of (parallel_process_return_values, parallel_process_errors).
      The order of return values and errors corresponds to the order of
      provided call_specs. parallel_process_return_values will contain return
      values of the functions executed in parallel. If a parallel process
      fails, the corresponding entry in the return value list will be
      NO_RESULT. Errors are only returned if raise_on_process_error is False.
      Each error is specified as a tuple of (error_type, error_message).
      If a parallel process succeeds (there's no error), the corresponding
      entry in the error list will be None.

    Raises:
      ParallelUtilsError: If raise_on_process_error is True and any of the
        parallel processes encounters an error.
    """
    return_queue = multiprocessing_utils.get_context().Queue()
    error_queue = multiprocessing_utils.get_context().Queue()
    gdm_logger.switch_to_multiprocess_logging()
    logging_queue = gdm_logger.get_logging_queue()
    extension_package_import_paths = [
        package_info["import_path"]
        for package_name, package_info in extensions.package_info.items()
        if package_name != "gazoo_device_controllers"  # Built-in controllers.
    ]

    processes = []
    for proc_id, call_spec in enumerate(call_specs):
        processes.append(multiprocessing_utils.get_context().Process(
            target=_process_wrapper,
            args=(return_queue, error_queue, logging_queue, proc_id,
                  extension_package_import_paths, call_spec),
        ))

    deadline = time.time() + timeout
    for process in processes:
        process.start()

    for process in processes:
        remaining_timeout = max(0,
                                deadline - time.time())  # ensure timeout >= 0
        process.join(timeout=remaining_timeout)
        if process.is_alive():
            process.terminate()
            process.join(timeout=_TIMEOUT_TERMINATE_PROCESS)
            if process.is_alive():
                process.kill()
                process.join(timeout=_TIMEOUT_TERMINATE_PROCESS)

    proc_results = [NO_RESULT] * len(call_specs)
    for proc_id, result in _read_all_from_queue(return_queue):
        proc_results[proc_id] = result

    proc_errors = [None] * len(call_specs)
    for proc_id, error_type_and_message in _read_all_from_queue(error_queue):
        proc_errors[proc_id] = error_type_and_message

    # We might not receive any results from a process if it times out or dies
    # unexpectedly. Mark such cases as errors.
    for proc_id in range(len(call_specs)):
        if proc_results[proc_id] == NO_RESULT and proc_errors[proc_id] is None:
            proc_errors[proc_id] = (
                errors.ResultNotReceivedError.__name__,
                "Did not receive any results from the process.")

    if raise_on_process_error and any(proc_errors):
        raise errors.ParallelUtilsError(
            f"Encountered errors in parallel processes: {proc_errors}")

    return proc_results, proc_errors
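A usage sketch for execute_concurrently (the greet function below is hypothetical; as Example #12 shows, _process_wrapper passes a Manager instance as the first positional argument of each call; from client code these names are parallel_utils.execute_concurrently and parallel_utils.CallSpec):

def greet(manager, device_name):
    # Hypothetical function executed in a worker process; `manager` is the
    # Manager instance created by _process_wrapper.
    del manager  # Unused in this sketch.
    return "Hello, {}!".format(device_name)

results, proc_errors = execute_concurrently(
    call_specs=[
        CallSpec(greet, "device-1234"),
        CallSpec(greet, "device-5678"),
    ],
    timeout=30,
    raise_on_process_error=False)
# Assuming both calls succeed: results == ["Hello, device-1234!",
# "Hello, device-5678!"] and proc_errors == [None, None].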
Example #26
    def setUp(self):
        super().setUp()

        self.mock_parser = mock.MagicMock(
            spec=event_parser_default.EventParserDefault)
        self.command_queue = multiprocessing_utils.get_context().Queue()
Example #27
 def test_003_switchboard_process_get_message_empty_queue(self):
     """Test getting message from an empty queue."""
     in_queue = multiprocessing_utils.get_context().Queue()
     message = switchboard_process.get_message(in_queue, timeout=0)
     self.assertIsNone(
         message, "Expected None from empty queue found {}".format(message))
Example #28
 def setUp(self):
     super().setUp()
     self.command_queue = multiprocessing_utils.get_context().Queue()
Example #29
import time
from unittest import mock

from gazoo_device import gdm_logger
from gazoo_device.switchboard import switchboard_process
from gazoo_device.tests.unit_tests.utils import unit_test_case
from gazoo_device.utility import multiprocessing_utils
import psutil

MagicMock = mock.MagicMock

_ECHO_MESSAGE = "My message to be echoed"
_EXIT_TIMEOUT = 1
_EXCEPTION_MESSAGE = "Test exception handler"
_EXCEPTION_TIMEOUT = 3
_MULTIPROCESSING_EVENT = multiprocessing_utils.get_context().Event()

wait_for_queue_writes = switchboard_process.wait_for_queue_writes


def do_work_return_true():
    return True


class BadQueue:
    pass


class ErrorProcess(switchboard_process.SwitchboardProcess):
    def _do_work(self):
        raise RuntimeError(_EXCEPTION_MESSAGE)
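A usage sketch for ErrorProcess (an assumption, not part of the test module): the RuntimeError raised in _do_work() should surface on the exception queue via _child_process_wrapper (see Example #21) rather than crash the parent process.

command_queue = multiprocessing_utils.get_context().Queue()
exception_queue = multiprocessing_utils.get_context().Queue()
error_proc = ErrorProcess("fake_device", "error_process",
                          exception_queue, command_queue)
error_proc.start()
traceback_text = exception_queue.get(timeout=_EXCEPTION_TIMEOUT)
# traceback_text should mention _EXCEPTION_MESSAGE and the process name.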