Example #1
def test_DataAnalyzerProcess__processes_change_magnetometer_config_command(
    four_board_analyzer_process_beta_2_mode, ):
    da_process = four_board_analyzer_process_beta_2_mode["da_process"]
    from_main_queue = four_board_analyzer_process_beta_2_mode["from_main_queue"]

    test_num_wells = 24
    expected_wells = [5, 6, 15, 16]
    test_config_dict = create_magnetometer_config_dict(test_num_wells)
    for well_idx in expected_wells:
        module_id = SERIAL_COMM_WELL_IDX_TO_MODULE_ID[well_idx]
        test_config_dict[module_id][3] = True

    expected_sampling_period = 15000
    set_sampling_period_command = {
        "communication_type": "acquisition_manager",
        "command": "change_magnetometer_config",
        "magnetometer_config": test_config_dict,
        "sampling_period": expected_sampling_period,
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        set_sampling_period_command, from_main_queue)

    invoke_process_run_and_check_errors(da_process)
    assert da_process.get_active_wells() == expected_wells
    expected_buffer_size = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS * int(
        1e6 / expected_sampling_period)
    assert da_process.get_buffer_size() == expected_buffer_size
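A quick sanity check of the buffer-size arithmetic above (a sketch; the value 10 for MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS is an assumption based on the "10 seconds of data" comments in the performance tests below):

# hypothetical worked example of the expected_buffer_size calculation
MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS = 10  # assumed value, per comments in later examples
expected_sampling_period = 15000  # microseconds, as in the test above

points_per_second = int(1e6 / expected_sampling_period)  # 1e6 us / 15000 us -> 66
expected_buffer_size = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS * points_per_second

assert points_per_second == 66
assert expected_buffer_size == 660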
Example #2
def test_DataAnalyzerProcess_beta_2_performance__fill_data_analysis_buffer(
    runnable_four_board_analyzer_process, ):
    # 11 seconds of data (100 Hz) coming in from File Writer and going through to Main
    #
    # initial pulse3D import:                             1.662150824
    # pulse3D 0.23.3:                                     1.680566285

    p, board_queues, comm_from_main_queue, comm_to_main_queue, _ = runnable_four_board_analyzer_process
    p._beta_2_mode = True
    p.change_magnetometer_config(
        {
            "magnetometer_config": DEFAULT_MAGNETOMETER_CONFIG,
            "sampling_period": DEFAULT_SAMPLING_PERIOD,
        }
    )

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(START_MANAGED_ACQUISITION_COMMUNICATION),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p, perform_setup_before_loop=True)

    num_seconds = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS + 1
    fill_da_input_data_queue(board_queues[0][0], num_seconds)
    start = time.perf_counter_ns()
    invoke_process_run_and_check_errors(p, num_iterations=num_seconds)
    dur_seconds = (time.perf_counter_ns() - start) / 10**9

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
    drain_queue(comm_to_main_queue)

    # print(f"Duration (seconds): {dur_seconds}")  # Eli (4/8/21): this is commented code that is deliberately kept in the codebase since it is often toggled on/off during optimization
    assert dur_seconds < 10
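The timing pattern above (capture time.perf_counter_ns() before and after, divide by 10**9) recurs in every performance test here and could be factored into a small helper. A minimal sketch, not part of the codebase (timed_block is a hypothetical name):

import time
from contextlib import contextmanager
from typing import Iterator, List

@contextmanager
def timed_block(durations: List[float]) -> Iterator[None]:
    # perf_counter_ns is monotonic and high-resolution, so it is suitable for
    # timing short runs; the elapsed seconds are appended on exit
    start = time.perf_counter_ns()
    try:
        yield
    finally:
        durations.append((time.perf_counter_ns() - start) / 10**9)

# usage inside a test:
# durations: List[float] = []
# with timed_block(durations):
#     invoke_process_run_and_check_errors(p, num_iterations=num_seconds)
# assert durations[0] < 10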
Example #3
def test_DataAnalyzerProcess__correctly_loads_construct_sensor_data_to_buffer_when_not_empty(
        test_well_index, test_construct_data, test_description,
        four_board_analyzer_process):
    p, board_queues, comm_from_main_queue, _, _ = four_board_analyzer_process
    p.init_streams()
    incoming_data = board_queues[0][0]

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p)

    test_construct_dict = {
        "is_reference_sensor": False,
        "well_index": test_well_index,
        "data": test_construct_data,
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_construct_dict, incoming_data)
    data_buffer = p._data_buffer  # pylint:disable=protected-access

    expected_construct_data = [[0, 0], [0, 0]]
    data_buffer[test_well_index]["construct_data"] = copy.deepcopy(
        expected_construct_data)

    invoke_process_run_and_check_errors(p)

    expected_construct_data[0].extend(test_construct_data[0])
    expected_construct_data[1].extend(test_construct_data[1])
    np.testing.assert_equal(data_buffer[test_well_index]["construct_data"],
                            expected_construct_data)
Example #4
def test_put_object_into_queue_and_raise_error_if_eventually_still_empty__raises_error_if_queue_not_populated(
    mocker,
):
    q = Queue()
    mocker.patch.object(q, "put", autospec=True)
    with pytest.raises(QueueStillEmptyError):
        put_object_into_queue_and_raise_error_if_eventually_still_empty("bill", q)
Example #5
def test_DataAnalyzerProcess_beta_1_performance__single_data_packet_per_well_without_analysis(
    runnable_four_board_analyzer_process, ):
    # 1 second of data (625 Hz) coming in from File Writer and going through to Main
    #
    # start:                                0.530731389
    # added twitch metric analysis:         0.578328276
    # initial pulse3D import:               0.533860423
    # pulse3D 0.23.3:                       0.539447351

    p, board_queues, comm_from_main_queue, comm_to_main_queue, _ = runnable_four_board_analyzer_process
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p, perform_setup_before_loop=True)

    num_seconds = 1
    fill_da_input_data_queue(board_queues[0][0], num_seconds)
    start = time.perf_counter_ns()
    invoke_process_run_and_check_errors(p,
                                        num_iterations=num_seconds * (24 + 6))
    dur_seconds = (time.perf_counter_ns() - start) / 10**9

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
    drain_queue(comm_to_main_queue)

    # print(f"Duration (seconds): {dur_seconds}")  # Eli (4/8/21): this is commented code that is deliberately kept in the codebase since it is often toggled on/off during optimization
    assert dur_seconds < 2
Example #6
def test_DataAnalyzerProcess_beta_1_performance__first_second_of_data_with_analysis(
    runnable_four_board_analyzer_process, ):
    # Fill the data analysis buffer with 10 seconds of data to start metric analysis,
    # then record the duration of sending 1 additional second of data
    #
    # start:                                0.547285524
    # initial pulse3D import:               0.535316489
    # pulse3D 0.23.3:                       0.535428579

    p, board_queues, comm_from_main_queue, comm_to_main_queue, _ = runnable_four_board_analyzer_process
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p, perform_setup_before_loop=True)

    # load data
    num_seconds = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS + 1
    fill_da_input_data_queue(board_queues[0][0], num_seconds)
    invoke_process_run_and_check_errors(
        p, num_iterations=MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS * (24 + 6))

    # send additional data and time analysis
    start = time.perf_counter_ns()
    invoke_process_run_and_check_errors(p, num_iterations=(24 + 6))
    dur_seconds = (time.perf_counter_ns() - start) / 10**9

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
    drain_queue(comm_to_main_queue)

    # print(f"Duration (seconds): {dur_seconds}")  # Eli (4/8/21): this is commented code that is deliberately kept in the codebase since it is often toggled on/off during optimization
    assert dur_seconds < 2
Example #7
def test_DataAnalyzerProcess_beta_1_performance__fill_data_analysis_buffer(
    runnable_four_board_analyzer_process, ):
    # 11 seconds of data (625 Hz) coming in from File Writer and going through to Main
    #
    # mantarray-waveform-analysis v0.3:     4.148136512
    # mantarray-waveform-analysis v0.3.1:   3.829136133
    # mantarray-waveform-analysis v0.4.0:   3.323093677
    # remove concatenate:                   2.966678695
    # 30 Hz Bessel filter:                  2.930061808  # Tanner (9/3/20): not intended to speed anything up, just adding this to show it didn't have much effect on performance
    # 30 Hz Butterworth filter:             2.935009033  # Tanner (9/10/20): not intended to speed anything up, just adding this to show it didn't have much effect on performance
    #
    # added twitch metric analysis:         3.013469479
    # initial pulse3D import:               3.855403546
    # pulse3D 0.23.3:                       3.890723909

    p, board_queues, comm_from_main_queue, comm_to_main_queue, _ = runnable_four_board_analyzer_process
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p, perform_setup_before_loop=True)

    num_seconds = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS + 1
    fill_da_input_data_queue(board_queues[0][0], num_seconds)
    start = time.perf_counter_ns()
    invoke_process_run_and_check_errors(p,
                                        num_iterations=num_seconds * (24 + 6))
    dur_seconds = (time.perf_counter_ns() - start) / 10**9

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
    drain_queue(comm_to_main_queue)

    # print(f"Duration (seconds): {dur_seconds}")  # Eli (4/8/21): this is commented code that is deliberately kept in the codebase since it is often toggled on/off during optimization
    assert dur_seconds < 10
Example #8
def test_DataAnalyzerProcess__processes_stop_managed_acquisition_command(
    four_board_analyzer_process, ):
    p, _, comm_from_main_queue, _, _ = four_board_analyzer_process

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p)

    data_buffer = p._data_buffer  # pylint:disable=protected-access
    for well_idx in range(24):
        data_buffer[well_idx]["construct_data"] = [[0, 0, 0], [1, 2, 3]]
        data_buffer[well_idx]["ref_data"] = [[0, 0, 0], [4, 5, 6]]

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        STOP_MANAGED_ACQUISITION_COMMUNICATION,
        comm_from_main_queue,
    )

    invoke_process_run_and_check_errors(p)
    assert p._end_of_data_stream_reached[0] is True  # pylint:disable=protected-access
    assert data_buffer[0]["construct_data"] is None
    assert data_buffer[0]["ref_data"] is None
    assert data_buffer[23]["construct_data"] is None
    assert data_buffer[23]["ref_data"] is None
Example #9
def test_put_object_into_queue_and_raise_error_if_eventually_still_empty__passes_timeout_kwarg_to_subfunction(
    mocker,
):
    expected = 2.2
    q = Queue()
    spied_not_empty = mocker.spy(queue_utils, "is_queue_eventually_not_empty")
    put_object_into_queue_and_raise_error_if_eventually_still_empty("bill", q, timeout_seconds=expected)
    spied_not_empty.assert_called_once_with(q, timeout_seconds=expected)
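For context, an "eventually not empty" check like the one spied on above generally polls until a deadline. A rough standalone sketch of that behavior (the real stdlib_utils implementation may differ):

import time
from queue import Queue

def is_queue_eventually_not_empty_sketch(q: Queue, timeout_seconds: float) -> bool:
    # poll the queue until an item becomes visible or the timeout elapses;
    # cross-process queues can briefly report empty right after a put
    deadline = time.monotonic() + timeout_seconds
    while time.monotonic() < deadline:
        if not q.empty():
            return True
        time.sleep(0.01)
    return False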
Example #10
def test_DataAnalyzerProcess__drain_all_queues__drains_all_queues_except_error_queue_and_returns__all_items(
    four_board_analyzer_process, ):
    expected = [[10, 11], [12, 13], [14, 15], [16, 17]]
    expected_error = "error"
    expected_from_main = "from_main"
    expected_to_main = "to_main"

    (
        data_analyzer_process,
        board_queues,
        from_main_queue,
        to_main_queue,
        error_queue,
    ) = four_board_analyzer_process
    for i, board in enumerate(board_queues):
        for j, queue in enumerate(board):
            queue_item = expected[i][j]
            put_object_into_queue_and_raise_error_if_eventually_still_empty(
                queue_item, queue)

    from_main_queue.put_nowait(expected_from_main)
    to_main_queue.put_nowait(expected_to_main)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        expected_error, error_queue)
    confirm_queue_is_eventually_of_size(from_main_queue, 1)
    confirm_queue_is_eventually_of_size(to_main_queue, 1)

    actual = data_analyzer_process._drain_all_queues()  # pylint:disable=protected-access

    confirm_queue_is_eventually_of_size(error_queue, 1)
    actual_error = error_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)
    assert actual_error == expected_error

    confirm_queue_is_eventually_empty(from_main_queue)
    confirm_queue_is_eventually_empty(to_main_queue)
    confirm_queue_is_eventually_empty(board_queues[3][0])
    confirm_queue_is_eventually_empty(board_queues[2][0])
    confirm_queue_is_eventually_empty(board_queues[1][0])
    confirm_queue_is_eventually_empty(board_queues[0][1])
    confirm_queue_is_eventually_empty(board_queues[0][0])

    assert actual["board_0"]["outgoing_data"] == [expected[0][1]]
    assert actual["board_3"]["file_writer_to_data_analyzer"] == [
        expected[3][0]
    ]
    assert actual["board_2"]["file_writer_to_data_analyzer"] == [
        expected[2][0]
    ]
    assert actual["board_1"]["file_writer_to_data_analyzer"] == [
        expected[1][0]
    ]
    assert actual["board_0"]["file_writer_to_data_analyzer"] == [
        expected[0][0]
    ]
    assert actual["from_main_to_data_analyzer"] == [expected_from_main]
    assert actual["from_data_analyzer_to_main"] == [expected_to_main]
Example #11
def put_object_into_queue_and_raise_error_if_eventually_still_empty(
    obj: object,
    the_queue: UnionOfThreadingAndMultiprocessingQueue,
    timeout_seconds: Union[float, int] = QUEUE_CHECK_TIMEOUT_SECONDS,
    sleep_after_put_seconds: Optional[Union[float, int]] = None,
) -> None:
    stdlib_utils.put_object_into_queue_and_raise_error_if_eventually_still_empty(
        obj, the_queue, timeout_seconds=timeout_seconds)
    if sleep_after_put_seconds is not None:
        time.sleep(sleep_after_put_seconds)
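A minimal usage sketch of this wrapper (assuming a plain multiprocessing queue; the 0.1 second sleep value is arbitrary):

from multiprocessing import Queue

q = Queue()
# put the command, raise QueueStillEmptyError if it never becomes visible,
# then give the consumer a brief head start before making assertions
put_object_into_queue_and_raise_error_if_eventually_still_empty(
    {"command": "example"}, q, sleep_after_put_seconds=0.1
)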
Example #12
def set_simulator_idle_ready(simulator_fixture):
    simulator = simulator_fixture["simulator"]
    testing_queue = simulator_fixture["testing_queue"]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        {
            "command": "set_status_code",
            "status_code": SERIAL_COMM_IDLE_READY_CODE
        },
        testing_queue,
    )
    invoke_process_run_and_check_errors(simulator)
Example #13
def test_DataAnalyzerProcess__processes_start_managed_acquisition_command__by_draining_outgoing_data_queue(
    four_board_analyzer_process, ):
    p, board_queues, comm_from_main_queue, _, _ = four_board_analyzer_process

    start_command = get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION()
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_command, comm_from_main_queue)

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        "item", board_queues[0][1])
    invoke_process_run_and_check_errors(p)
    confirm_queue_is_eventually_empty(board_queues[0][1])
Example #14
def test_DataAnalyzerProcess__reinits_streams_upon_receiving_stop_managed_acquisition_command(
        four_board_analyzer_process, mocker):
    p, _, from_main_queue, _, _ = four_board_analyzer_process

    spied_init_streams = mocker.spy(p, "init_streams")

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        STOP_MANAGED_ACQUISITION_COMMUNICATION,
        from_main_queue,
    )
    invoke_process_run_and_check_errors(p)

    spied_init_streams.assert_called_once_with()
Example #15
def test_DataAnalyzerProcess__raises_error_with_unrecognized_acquisition_manager_command(
        four_board_analyzer_process, mocker, patch_print):
    p, _, comm_from_main_queue, _, _ = four_board_analyzer_process

    expected_command = "fake_command"
    start_command = {
        "communication_type": "acquisition_manager",
        "command": expected_command,
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_command, comm_from_main_queue)

    with pytest.raises(UnrecognizedCommandFromMainToDataAnalyzerError,
                       match=expected_command):
        invoke_process_run_and_check_errors(p)
Example #16
def test_DataAnalyzerProcess__raises_error_if_communication_type_is_invalid(
        four_board_analyzer_process, mocker, patch_print):
    p, _, comm_from_main_queue, _, _ = four_board_analyzer_process

    invalid_command = {
        "communication_type": "fake_type",
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        invalid_command,
        comm_from_main_queue,
    )

    with pytest.raises(UnrecognizedCommandFromMainToDataAnalyzerError,
                       match="fake_type"):
        invoke_process_run_and_check_errors(p)
Example #17
def test_DataAnalyzerProcess_commands_for_each_run_iteration__checks_for_calibration_update_from_main(
    four_board_analyzer_process, ):
    calibration_comm = {
        "communication_type": "calibration",
        "calibration_settings": 1,
    }

    p, _, comm_from_main_queue, _, _ = four_board_analyzer_process
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        calibration_comm,
        comm_from_main_queue,
        timeout_seconds=QUEUE_CHECK_TIMEOUT_SECONDS,
    )
    invoke_process_run_and_check_errors(p)

    actual = p.get_calibration_settings()
    assert actual == calibration_comm["calibration_settings"]
Example #18
def set_stim_info_and_start_stimulating(simulator_fixture, stim_info):
    simulator = simulator_fixture["simulator"]
    testing_queue = simulator_fixture["testing_queue"]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        {
            "command": "set_stim_info",
            "stim_info": stim_info
        }, testing_queue)
    invoke_process_run_and_check_errors(simulator)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        {
            "command": "set_stim_status",
            "status": True
        }, testing_queue)
    invoke_process_run_and_check_errors(simulator)
    # remove all bytes sent from initial subprotocol status update
    simulator.read_all()
Example #19
def test_DataAnalyzerProcess__correctly_pairs_descending_order_ref_sensor_data_in_buffer(
    four_board_analyzer_process, ):
    p, board_queues, comm_from_main_queue, _, _ = four_board_analyzer_process
    incoming_data = board_queues[0][0]

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p)

    test_ref_data = np.array(
        [
            [125, 375, 625, 875, 1125, 1375, 1625, 1875, 2125, 2375, 2625, 2875],
            [11, 23, 35, 47, 59, 71, 83, 95, 107, 119, 131, 143],
        ],
        dtype=np.int32,
    )
    test_ref_dict = {
        "is_reference_sensor": True,
        "reference_for_wells": REF_INDEX_TO_24_WELL_INDEX[5],
        "data": test_ref_data,
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_ref_dict, incoming_data)

    invoke_process_run_and_check_errors(p)

    data_buffer = p._data_buffer  # pylint:disable=protected-access
    expected_ref_data_23 = np.array([[1250, 11250, 21250], [11, 59, 107]],
                                    dtype=np.int32)
    np.testing.assert_equal(data_buffer[23]["ref_data"], expected_ref_data_23)
    expected_ref_data_22 = np.array([[3750, 13750, 23750], [23, 71, 119]],
                                    dtype=np.int32)
    np.testing.assert_equal(data_buffer[22]["ref_data"], expected_ref_data_22)
    expected_ref_data_19 = np.array([[6250, 16250, 26250], [35, 83, 131]],
                                    dtype=np.int32)
    np.testing.assert_equal(data_buffer[19]["ref_data"], expected_ref_data_19)
    expected_ref_data_18 = np.array([[8750, 18750, 28750], [47, 95, 143]],
                                    dtype=np.int32)
    np.testing.assert_equal(data_buffer[18]["ref_data"], expected_ref_data_18)
Example #20
def test_DataAnalyzerProcess__correctly_pairs_ascending_order_ref_sensor_data_in_buffer(
    four_board_analyzer_process, ):
    p, board_queues, comm_from_main_queue, _, _ = four_board_analyzer_process
    incoming_data = board_queues[0][0]

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION(),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p)

    test_ref_data = np.array(
        [
            [125, 375, 625, 875, 1125, 1375, 1625, 1875, 2125, 2375, 2625, 2875],
            [6, 18, 30, 42, 54, 66, 78, 90, 102, 114, 126, 138],
        ],
        dtype=np.int32,
    )
    test_ref_dict = {
        "is_reference_sensor": True,
        "reference_for_wells": REF_INDEX_TO_24_WELL_INDEX[0],
        "data": test_ref_data,
    }
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_ref_dict, incoming_data)

    invoke_process_run_and_check_errors(p)

    data_buffer = p._data_buffer  # pylint:disable=protected-access
    expected_ref_data_0 = np.array([[1250, 11250, 21250], [6, 54, 102]],
                                   dtype=np.int32)
    np.testing.assert_equal(data_buffer[0]["ref_data"], expected_ref_data_0)
    expected_ref_data_1 = np.array([[3750, 13750, 23750], [18, 66, 114]],
                                   dtype=np.int32)
    np.testing.assert_equal(data_buffer[1]["ref_data"], expected_ref_data_1)
    expected_ref_data_4 = np.array([[6250, 16250, 26250], [30, 78, 126]],
                                   dtype=np.int32)
    np.testing.assert_equal(data_buffer[4]["ref_data"], expected_ref_data_4)
    expected_ref_data_5 = np.array([[8750, 18750, 28750], [42, 90, 138]],
                                   dtype=np.int32)
    np.testing.assert_equal(data_buffer[5]["ref_data"], expected_ref_data_5)
Example #21
def test_DataAnalyzerProcess_beta_2_performance__first_second_of_data_with_analysis(
    runnable_four_board_analyzer_process, ):
    # Fill the data analysis buffer with 10 seconds of data to start metric analysis,
    # then record the duration of sending 1 additional second of data
    #
    # initial pulse3D import:                             0.334087008
    # pulse3D 0.23.3:                                     0.337370183

    p, board_queues, comm_from_main_queue, comm_to_main_queue, _ = runnable_four_board_analyzer_process
    p._beta_2_mode = True
    p.change_magnetometer_config(
        {
            "magnetometer_config": DEFAULT_MAGNETOMETER_CONFIG,
            "sampling_period": DEFAULT_SAMPLING_PERIOD,
        }
    )

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(START_MANAGED_ACQUISITION_COMMUNICATION),
        comm_from_main_queue,
    )
    invoke_process_run_and_check_errors(p, perform_setup_before_loop=True)

    # load data
    num_seconds = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS + 1
    fill_da_input_data_queue(board_queues[0][0], num_seconds)
    invoke_process_run_and_check_errors(p, num_iterations=num_seconds - 1)

    # send additional data and time analysis
    start = time.perf_counter_ns()
    invoke_process_run_and_check_errors(p)
    dur_seconds = (time.perf_counter_ns() - start) / 10**9

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
    drain_queue(comm_to_main_queue)

    # print(f"Duration (seconds): {dur_seconds}")  # Eli (4/8/21): this is commented code that is deliberately kept in the codebase since it is often toggled on/off during optimization
    assert dur_seconds < 2
Example #22
def test_DataAnalyzerProcess__formats_and_passes_incoming_stim_packet_through_to_main(
        four_board_analyzer_process_beta_2_mode, mocker):
    da_process = four_board_analyzer_process_beta_2_mode["da_process"]
    incoming_data_queue = four_board_analyzer_process_beta_2_mode["board_queues"][0][0]
    outgoing_data_queue = four_board_analyzer_process_beta_2_mode["board_queues"][0][1]

    test_stim_packet = copy.deepcopy(SIMPLE_STIM_DATA_PACKET_FROM_ALL_WELLS)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        copy.deepcopy(test_stim_packet), incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)

    confirm_queue_is_eventually_of_size(outgoing_data_queue, 1)
    outgoing_msg = outgoing_data_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)

    expected_stim_data = {
        well_idx: stim_status_arr.tolist()
        for well_idx, stim_status_arr in test_stim_packet["well_statuses"].items()
    }

    assert outgoing_msg["data_type"] == "stimulation"
    assert outgoing_msg["data_json"] == json.dumps(expected_stim_data)
Example #23
def test_DataAnalyzerProcess__logs_performance_metrics_after_creating_beta_2_data(
        four_board_analyzer_process_beta_2_mode, mocker):
    da_process = four_board_analyzer_process_beta_2_mode["da_process"]
    to_main_queue = four_board_analyzer_process_beta_2_mode["to_main_queue"]
    from_main_queue = four_board_analyzer_process_beta_2_mode["from_main_queue"]
    board_queues = four_board_analyzer_process_beta_2_mode["board_queues"]

    # perform setup so performance logging values are initialized
    invoke_process_run_and_check_errors(da_process,
                                        perform_setup_before_loop=True)

    # set this to a lower value to speed up the test
    da_process._minimum_iteration_duration_seconds /= 10

    # mock functions to speed up test
    mocker.patch.object(data_analyzer,
                        "get_force_signal",
                        autospec=True,
                        return_value=np.zeros((2, 2)))
    mocker.patch.object(data_analyzer,
                        "peak_detector",
                        autospec=True,
                        side_effect=PeakDetectionError())

    # set magnetometer configuration
    expected_sampling_period_us = 10000
    num_data_points_per_second = MICRO_TO_BASE_CONVERSION // expected_sampling_period_us
    set_magnetometer_config(
        four_board_analyzer_process_beta_2_mode,
        {
            "magnetometer_config": GENERIC_BOARD_MAGNETOMETER_CONFIGURATION,
            "sampling_period": expected_sampling_period_us,
        },
    )
    # start managed acquisition
    start_command = get_mutable_copy_of_START_MANAGED_ACQUISITION_COMMUNICATION()
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_command, from_main_queue)
    invoke_process_run_and_check_errors(da_process)
    # remove command receipt
    to_main_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)

    # create expected durations for metric creation
    expected_num_data_packets = MIN_NUM_SECONDS_NEEDED_FOR_ANALYSIS
    expected_data_creation_durs = [
        random.uniform(30, 70) for _ in range(expected_num_data_packets)
    ]
    mocker.patch.object(
        data_analyzer,
        "_get_secs_since_data_creation_start",
        autospec=True,
        side_effect=expected_data_creation_durs,
    )
    expected_data_analysis_durs = [random.uniform(20, 80) for _ in range(24)]
    mocker.patch.object(
        data_analyzer,
        "_get_secs_since_data_analysis_start",
        autospec=True,
        side_effect=expected_data_analysis_durs,
    )

    # create test data packets
    for packet_num in range(expected_num_data_packets):
        test_packet = copy.deepcopy(
            SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
        test_packet["time_indices"] = (np.arange(
            num_data_points_per_second * packet_num,
            num_data_points_per_second * (packet_num + 1),
            dtype=np.int64,
        ) * expected_sampling_period_us)
        put_object_into_queue_and_raise_error_if_eventually_still_empty(
            test_packet, board_queues[0][0])
        invoke_process_run_and_check_errors(da_process)
    confirm_queue_is_eventually_of_size(
        to_main_queue, expected_num_data_packets * 2
    )  # Tanner (1/4/21): a log message is also put into queue after each waveform data dump

    actual = drain_queue(to_main_queue)[-1]["message"]
    assert actual["communication_type"] == "performance_metrics"
    assert actual["data_creation_duration"] == expected_data_creation_durs[-1]
    assert actual["data_creation_duration_metrics"] == {
        "max":
        max(expected_data_creation_durs),
        "min":
        min(expected_data_creation_durs),
        "stdev":
        round(stdev(expected_data_creation_durs), 6),
        "mean":
        round(
            sum(expected_data_creation_durs) /
            len(expected_data_creation_durs), 6),
    }
    assert actual["data_analysis_duration_metrics"] == {
        "max":
        max(expected_data_analysis_durs),
        "min":
        min(expected_data_analysis_durs),
        "stdev":
        round(stdev(expected_data_analysis_durs), 6),
        "mean":
        round(
            sum(expected_data_analysis_durs) /
            len(expected_data_analysis_durs), 6),
    }
    # values created in parent class
    assert "start_timepoint_of_measurements" not in actual
    assert "idle_iteration_time_ns" not in actual
    assert "longest_iterations" in actual
    assert "percent_use" in actual
    assert "percent_use_metrics" in actual

    # prevent BrokenPipeErrors
    drain_queue(board_queues[0][1])
Example #24
def test_put_object_into_queue_and_raise_error_if_eventually_still_empty__puts_object_into_queue():
    expected = "bob"
    q = Queue()
    put_object_into_queue_and_raise_error_if_eventually_still_empty(expected, q)
    actual = q.get_nowait()
    assert actual == expected
Example #25
def test_DataAnalyzerProcess__sends_outgoing_data_dict_to_main_as_soon_as_it_retrieves_a_data_packet_from_file_writer__and_sends_data_available_message_to_main(
        four_board_analyzer_process_beta_2_mode, mocker):
    da_process = four_board_analyzer_process_beta_2_mode["da_process"]
    from_main_queue = four_board_analyzer_process_beta_2_mode["from_main_queue"]
    to_main_queue = four_board_analyzer_process_beta_2_mode["to_main_queue"]
    incoming_data_queue = four_board_analyzer_process_beta_2_mode["board_queues"][0][0]
    outgoing_data_queue = four_board_analyzer_process_beta_2_mode["board_queues"][0][1]

    spied_fix = mocker.spy(data_analyzer, "fix_dropped_samples")
    # mock so that well metrics don't populate outgoing data queue
    mocker.patch.object(da_process,
                        "_dump_outgoing_well_metrics",
                        autospec=True)
    # mock so performance log messages don't populate queue to main
    mocker.patch.object(da_process,
                        "_handle_performance_logging",
                        autospec=True)

    da_process.init_streams()
    # set an arbitrary sampling period in the config
    test_sampling_period = 1000
    set_magnetometer_config(
        four_board_analyzer_process_beta_2_mode,
        {
            "magnetometer_config": GENERIC_BOARD_MAGNETOMETER_CONFIGURATION,
            "sampling_period": test_sampling_period,
        },
    )

    # start managed_acquisition
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(START_MANAGED_ACQUISITION_COMMUNICATION), from_main_queue)
    invoke_process_run_and_check_errors(da_process)
    confirm_queue_is_eventually_of_size(to_main_queue, 1)
    # remove message to main
    to_main_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)

    invoke_process_run_and_check_errors(da_process)
    confirm_queue_is_eventually_empty(outgoing_data_queue)
    confirm_queue_is_eventually_empty(to_main_queue)

    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    # fix time indices so it doesn't create "ZeroDivisionError: float division"
    test_data_packet["time_indices"] = np.arange(
        test_data_packet["time_indices"].shape[0])
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        copy.deepcopy(test_data_packet), incoming_data_queue)

    invoke_process_run_and_check_errors(da_process)
    assert spied_fix.call_count == 24
    confirm_queue_is_eventually_of_size(outgoing_data_queue, 1)
    confirm_queue_is_eventually_of_size(to_main_queue, 1)

    # test data dump
    waveform_data_points = dict()
    filter_coefficients = create_filter(BUTTERWORTH_LOWPASS_30_UUID,
                                        test_sampling_period)
    for well_idx in range(24):
        default_channel_data = test_data_packet[well_idx][
            SERIAL_COMM_DEFAULT_DATA_CHANNEL]
        fixed_default_channel_data = fix_dropped_samples(default_channel_data)
        flipped_default_channel_data = (
            (fixed_default_channel_data.astype(np.int32) -
             max(fixed_default_channel_data)) * -1 +
            MEMSIC_CENTER_OFFSET).astype(np.uint16)
        compressed_data = get_force_signal(
            np.array([
                test_data_packet["time_indices"], flipped_default_channel_data
            ], np.int64),
            filter_coefficients,
        )
        waveform_data_points[well_idx] = {
            "x_data_points": compressed_data[0].tolist(),
            "y_data_points": (compressed_data[1] * MICRO_TO_BASE_CONVERSION).tolist(),
        }
    expected_outgoing_dict = {
        "waveform_data": {
            "basic_data": {
                "waveform_data_points": waveform_data_points
            }
        },
        "earliest_timepoint": test_data_packet["time_indices"][0].item(),
        "latest_timepoint": test_data_packet["time_indices"][-1].item(),
        "num_data_points": len(test_data_packet["time_indices"]),
    }

    outgoing_msg = outgoing_data_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)
    assert outgoing_msg["data_type"] == "waveform_data"
    assert outgoing_msg["data_json"] == json.dumps(expected_outgoing_dict)
    # test message sent to main
    outgoing_msg = to_main_queue.get(timeout=QUEUE_CHECK_TIMEOUT_SECONDS)
    expected_msg = {
        "communication_type": "data_available",
        "timestamp": "2021-06-15 16:39:10.120589",
        "num_data_points": len(test_data_packet["time_indices"]),
        "earliest_timepoint": test_data_packet["time_indices"][0],
        "latest_timepoint": test_data_packet["time_indices"][-1],
    }
    assert outgoing_msg == expected_msg
Example #26
def test_DataAnalyzerProcess__does_not_process_data_packets_after_receiving_stop_managed_acquisition_command_until_receiving_first_packet_of_new_stream(
        four_board_analyzer_process_beta_2_mode, mocker):
    da_process = four_board_analyzer_process_beta_2_mode["da_process"]
    from_main_queue = four_board_analyzer_process_beta_2_mode["from_main_queue"]
    to_main_queue = four_board_analyzer_process_beta_2_mode["to_main_queue"]
    incoming_data_queue = four_board_analyzer_process_beta_2_mode["board_queues"][0][0]

    # mock this out since the test is not using real data
    mocked_process_data = mocker.patch.object(da_process,
                                              "_process_beta_2_data",
                                              autospec=True,
                                              return_value={})

    invoke_process_run_and_check_errors(da_process,
                                        perform_setup_before_loop=True)
    # set an arbitrary sampling period in the config
    test_sampling_period = 10000
    set_magnetometer_config(
        four_board_analyzer_process_beta_2_mode,
        {
            "magnetometer_config": GENERIC_BOARD_MAGNETOMETER_CONFIGURATION,
            "sampling_period": test_sampling_period,
        },
    )

    # start managed_acquisition
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(START_MANAGED_ACQUISITION_COMMUNICATION), from_main_queue)
    invoke_process_run_and_check_errors(da_process)
    # send first packet of first stream and make sure it is processed
    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    test_data_packet["is_first_packet_of_stream"] = True
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)
    assert mocked_process_data.call_count == 1
    # send another packet of first stream and make sure it is processed
    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    test_data_packet["is_first_packet_of_stream"] = False
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)
    assert mocked_process_data.call_count == 2

    # stop managed acquisition and make sure next data packet in the first stream is not processed
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(STOP_MANAGED_ACQUISITION_COMMUNICATION), from_main_queue)
    invoke_process_run_and_check_errors(da_process)
    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    test_data_packet["is_first_packet_of_stream"] = False
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)
    assert mocked_process_data.call_count == 2

    # start managed acquisition again and make sure the next data packet from the old stream is still not processed
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        dict(START_MANAGED_ACQUISITION_COMMUNICATION), from_main_queue)
    invoke_process_run_and_check_errors(da_process)
    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    test_data_packet["is_first_packet_of_stream"] = False
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)
    assert mocked_process_data.call_count == 2

    # send first data packet from second stream and make sure it is processed
    test_data_packet = copy.deepcopy(
        SIMPLE_BETA_2_CONSTRUCT_DATA_FROM_ALL_WELLS)
    test_data_packet["is_first_packet_of_stream"] = True
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, incoming_data_queue)
    invoke_process_run_and_check_errors(da_process)
    assert mocked_process_data.call_count == 3

    # prevent BrokenPipeErrors
    drain_queue(to_main_queue)