Example #1
def test_FileWriterProcess__process_next_data_packet__does_not_write_tissue_data_if_data_chunk_is_all_before_the_timestamp_idx(
    four_board_file_writer_process, ):
    file_writer_process = four_board_file_writer_process["fw_process"]
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    this_command = copy.deepcopy(GENERIC_BETA_1_START_RECORDING_COMMAND)
    this_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_command,
        from_main_queue,
    )
    num_data_points = 5
    data = np.zeros((2, num_data_points), dtype=np.int32)

    for this_index in range(num_data_points):
        data[0, this_index] = (
            this_command["timepoint_to_begin_recording_at"] +
            (this_index - 25) * CONSTRUCT_SENSOR_SAMPLING_PERIOD)
        data[1, this_index] = this_index * 2

    this_data_packet = copy.deepcopy(GENERIC_TISSUE_DATA_PACKET)
    this_data_packet["data"] = data

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_data_packet,
        board_queues[0][0],
    )
    invoke_process_run_and_check_errors(file_writer_process)

    actual_file = open_the_generic_h5_file(file_dir)
    assert str(UTC_FIRST_TISSUE_DATA_POINT_UUID) not in actual_file.attrs
    actual_tissue_data = get_tissue_dataset_from_file(actual_file)
    assert actual_tissue_data.shape == (0, )
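
Note: the GENERIC_TISSUE_DATA_PACKET fixture used here is not shown in this listing. A minimal stand-in for it, inferred from the Beta 1 packets built by hand in Example #8 below, might look like the following (the real fixture may carry additional keys):

import numpy as np

# Hypothetical stand-in for the GENERIC_TISSUE_DATA_PACKET fixture (inferred, not the real object).
# Row 0 of "data" holds timepoints and row 1 holds sensor readings; the tests in this listing
# overwrite "data" (and sometimes "well_index") before enqueueing the packet.
GENERIC_TISSUE_DATA_PACKET = {
    "well_index": 4,
    "is_reference_sensor": False,
    "data": np.zeros((2, 1), dtype=np.int32),
}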
Example #2
def test_FileWriterProcess__process_next_data_packet__writes_tissue_data_for_two_packets_when_the_timestamp_idx_starts_part_way_through_the_first_packet__and_sets_timestamp_metadata_for_tissue_since_this_is_first_piece_of_data(
    four_board_file_writer_process, ):
    file_writer_process = four_board_file_writer_process["fw_process"]
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    this_command = copy.deepcopy(GENERIC_BETA_1_START_RECORDING_COMMAND)
    this_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_command,
        from_main_queue,
    )
    num_data_points = 75
    data = np.zeros((2, num_data_points), dtype=np.int32)

    for this_index in range(num_data_points):
        data[0, this_index] = (
            this_command["timepoint_to_begin_recording_at"] +
            (this_index - 30) * CONSTRUCT_SENSOR_SAMPLING_PERIOD +
            DATA_FRAME_PERIOD)
        data[1, this_index] = this_index * 2

    this_data_packet = copy.deepcopy(GENERIC_TISSUE_DATA_PACKET)
    this_data_packet["data"] = data

    num_data_points = 15
    next_data = np.zeros((2, num_data_points), dtype=np.int32)
    for this_index in range(num_data_points):
        next_data[0, this_index] = (
            data[0, -1] + (this_index + 1) * CONSTRUCT_SENSOR_SAMPLING_PERIOD)
        next_data[1, this_index] = this_index * 2 + 1000

    next_data_packet = copy.deepcopy(GENERIC_TISSUE_DATA_PACKET)
    next_data_packet["data"] = next_data

    board_queues[0][0].put_nowait(this_data_packet)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        next_data_packet,
        board_queues[0][0],
    )
    invoke_process_run_and_check_errors(file_writer_process, num_iterations=2)

    actual_file = open_the_generic_h5_file(file_dir)
    expected_timestamp = this_command["metadata_to_copy_onto_main_file_attributes"][
        UTC_BEGINNING_DATA_ACQUISTION_UUID
    ] + datetime.timedelta(
        seconds=(this_command["timepoint_to_begin_recording_at"] + DATA_FRAME_PERIOD)
        / CENTIMILLISECONDS_PER_SECOND
    )
    assert actual_file.attrs[str(UTC_FIRST_TISSUE_DATA_POINT_UUID)] == expected_timestamp.strftime(
        "%Y-%m-%d %H:%M:%S.%f"
    )
    actual_tissue_data = get_tissue_dataset_from_file(actual_file)
    assert actual_tissue_data.shape == (60, )
    assert actual_tissue_data[0] == 60
    assert actual_tissue_data[-1] == 1028
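
The expected timestamp in this test is the acquisition start time plus the recording-start timepoint converted from the Beta 1 time base into seconds. A standalone illustration of the same arithmetic, using made-up values and assuming CENTIMILLISECONDS_PER_SECOND == 100_000 (one centimillisecond is 10 microseconds):

import datetime

CENTIMILLISECONDS_PER_SECOND = 100_000  # assumed value of the constant used above

# Made-up stand-ins for the fixture metadata and command fields
acquisition_start = datetime.datetime(2020, 2, 9, 19, 3, 22)
timepoint_to_begin_recording_at = 298_518_000  # centimilliseconds since acquisition start
data_frame_period = 20  # centimilliseconds

expected_timestamp = acquisition_start + datetime.timedelta(
    seconds=(timepoint_to_begin_recording_at + data_frame_period) / CENTIMILLISECONDS_PER_SECOND
)
print(expected_timestamp.strftime("%Y-%m-%d %H:%M:%S.%f"))  # -> 2020-02-09 19:53:07.180200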
Example #3
def test_FileWriterProcess_process_magnetometer_data_packet__writes_data_if_the_timestamp_idx_starts_part_way_through_the_chunk__and_sets_timestamp_metadata_for_tissue_since_this_is_first_piece_of_data(
    four_board_file_writer_process, ):
    fw_process = four_board_file_writer_process["fw_process"]
    fw_process.set_beta_2_mode()
    populate_calibration_folder(fw_process)
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    start_recording_command = copy.deepcopy(
        GENERIC_BETA_2_START_RECORDING_COMMAND)
    start_recording_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_recording_command, from_main_queue)
    total_num_data_points = 75
    num_recorded_data_points = 50
    time_index_offset = total_num_data_points - num_recorded_data_points
    start_timepoint = start_recording_command[
        "timepoint_to_begin_recording_at"] - time_index_offset
    test_data_packet = create_simple_data_packet(
        start_timepoint,
        0,
        start_recording_command["active_well_indices"],
        total_num_data_points,
    )

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    expected_timestamp = start_recording_command["metadata_to_copy_onto_main_file_attributes"][
        UTC_BEGINNING_DATA_ACQUISTION_UUID
    ] + datetime.timedelta(
        seconds=start_recording_command["timepoint_to_begin_recording_at"] / MICRO_TO_BASE_CONVERSION
    )

    this_file = open_the_generic_h5_file(file_dir, beta_version=2)
    assert this_file.attrs[str(UTC_FIRST_TISSUE_DATA_POINT_UUID)] == expected_timestamp.strftime(
        "%Y-%m-%d %H:%M:%S.%f"
    )
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (num_recorded_data_points, )
    assert actual_time_index_data[0] == start_timepoint + time_index_offset
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED,
                                             num_recorded_data_points)
    assert actual_time_offset_data[0, 9] == (9 + time_index_offset) * 2
    assert actual_time_offset_data[1, 6] == (6 + time_index_offset) * 2
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED,
                                        num_recorded_data_points)
    assert actual_tissue_data[0, 9] == (9 + time_index_offset) * 3
    assert actual_tissue_data[1, 6] == (6 + time_index_offset) * 4
    # close file to avoid issues on Windows
    this_file.close()
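
The create_simple_data_packet helper used by the Beta 2 tests in this listing is not shown. A sketch of what it plausibly does, reverse-engineered from the assertions above (time indices increase by 1 per point, time offsets are 2x the running data index, and the two enabled channels are 3x and 4x that index); the real helper's signature, channel keys, and dtypes may differ:

import numpy as np

# Hypothetical reconstruction of create_simple_data_packet; actual packets key their
# channels by sensor/axis IDs as shown in Example #7, so the plain int keys 0 and 1
# below are stand-ins only.
def create_simple_data_packet(start_timepoint, first_data_index, well_indices, num_data_points):
    data_indices = np.arange(first_data_index, first_data_index + num_data_points, dtype=np.uint64)
    data_packet = {
        "data_type": "magnetometer",
        "time_indices": np.arange(start_timepoint, start_timepoint + num_data_points, dtype=np.uint64),
        "is_first_packet_of_stream": first_data_index == 0,  # guess; not confirmed by these tests
    }
    for well_idx in well_indices:
        data_packet[well_idx] = {
            "time_offsets": np.tile(data_indices * 2, (2, 1)).astype(np.uint16),
            0: (data_indices * 3).astype(np.uint16),  # stand-in key for the first enabled channel
            1: (data_indices * 4).astype(np.uint16),  # stand-in key for the second enabled channel
        }
    return data_packet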
Example #4
def test_FileWriterProcess__process_next_data_packet__writes_tissue_data_if_the_whole_data_chunk_is_at_the_timestamp_idx__and_sets_timestamp_metadata_for_tissue_since_this_is_first_piece_of_data(
    four_board_file_writer_process, ):
    file_writer_process = four_board_file_writer_process["fw_process"]
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    this_command = copy.deepcopy(GENERIC_BETA_1_START_RECORDING_COMMAND)
    this_command["active_well_indices"] = [3]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_command,
        from_main_queue,
    )
    num_data_points = 50
    data = np.zeros((2, num_data_points), dtype=np.int32)
    for this_index in range(num_data_points):
        data[0, this_index] = (
            this_command["timepoint_to_begin_recording_at"] +
            this_index * CONSTRUCT_SENSOR_SAMPLING_PERIOD)
        data[1, this_index] = this_index * 2
    this_data_packet = copy.deepcopy(GENERIC_TISSUE_DATA_PACKET)
    this_data_packet["well_index"] = 3
    this_data_packet["data"] = data

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_data_packet,
        board_queues[0][0],
    )
    invoke_process_run_and_check_errors(file_writer_process)
    actual_file = open_the_generic_h5_file(file_dir, well_name="D1")

    expected_timestamp = this_command["metadata_to_copy_onto_main_file_attributes"][
        UTC_BEGINNING_DATA_ACQUISTION_UUID
    ] + datetime.timedelta(
        seconds=this_command["timepoint_to_begin_recording_at"] / CENTIMILLISECONDS_PER_SECOND
    )
    assert actual_file.attrs[str(UTC_FIRST_TISSUE_DATA_POINT_UUID)] == expected_timestamp.strftime(
        "%Y-%m-%d %H:%M:%S.%f"
    )
    actual_tissue_data = get_tissue_dataset_from_file(actual_file)
    assert actual_tissue_data.shape == (50, )
    assert actual_tissue_data[3] == 6
    assert actual_tissue_data[9] == 18
Example #5
def test_FileWriterProcess_process_magnetometer_data_packet__does_not_write_data_if_data_chunk_is_all_before_the_timestamp_idx(
    four_board_file_writer_process, ):
    fw_process = four_board_file_writer_process["fw_process"]
    fw_process.set_beta_2_mode()
    populate_calibration_folder(fw_process)
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    start_recording_command = copy.deepcopy(
        GENERIC_BETA_2_START_RECORDING_COMMAND)
    start_recording_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_recording_command, from_main_queue)
    num_data_points = 30
    start_timepoint = start_recording_command[
        "timepoint_to_begin_recording_at"] - num_data_points
    test_data_packet = create_simple_data_packet(
        start_timepoint,
        0,
        start_recording_command["active_well_indices"],
        num_data_points,
    )

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    this_file = open_the_generic_h5_file(file_dir, beta_version=2)
    assert str(UTC_FIRST_TISSUE_DATA_POINT_UUID) not in this_file.attrs
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (0, )
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED, 0)
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED, 0)
    # close file to avoid issues on Windows
    this_file.close()
Example #6
def test_FileWriterProcess__process_next_data_packet__adds_a_data_packet_before_the_stop_recording_timepoint__and_does_not_set_tissue_finalization_status_to_true__if_data_packet_is_completely_before_timepoint(
    four_board_file_writer_process, ):
    file_writer_process = four_board_file_writer_process["fw_process"]
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    start_command = copy.deepcopy(GENERIC_BETA_1_START_RECORDING_COMMAND)
    start_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_command,
        from_main_queue,
    )

    num_data_points = 10
    data = np.zeros((2, num_data_points), dtype=np.int32)

    for this_index in range(num_data_points):
        data[0, this_index] = (
            start_command["timepoint_to_begin_recording_at"] +
            this_index * CONSTRUCT_SENSOR_SAMPLING_PERIOD +
            DATA_FRAME_PERIOD)
        data[1, this_index] = this_index * 2

    this_data_packet = copy.deepcopy(GENERIC_TISSUE_DATA_PACKET)
    this_data_packet["data"] = data

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_data_packet,
        board_queues[0][0],
    )
    invoke_process_run_and_check_errors(file_writer_process)

    actual_file = open_the_generic_h5_file(file_dir)

    # confirm some data already recorded to file
    actual_tissue_data = get_tissue_dataset_from_file(actual_file)
    assert actual_tissue_data.shape == (10, )
    assert actual_tissue_data[9] == 18
    assert actual_tissue_data[3] == 6

    stop_command = copy.deepcopy(GENERIC_STOP_RECORDING_COMMAND)

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        stop_command,
        from_main_queue,
    )
    data_before_stop = np.zeros((2, num_data_points), dtype=np.int32)
    for this_index in range(num_data_points):
        data_before_stop[0, this_index] = (
            stop_command["timepoint_to_stop_recording_at"] +
            (this_index - 10) * CONSTRUCT_SENSOR_SAMPLING_PERIOD)
        data_before_stop[1, this_index] = this_index * 5
    this_data_packet["data"] = data_before_stop

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        this_data_packet,
        board_queues[0][0],
    )
    invoke_process_run_and_check_errors(file_writer_process)

    # confirm additional data added to file
    actual_data = get_tissue_dataset_from_file(actual_file)
    assert actual_data.shape == (20, )
    assert actual_data[11] == 5
    assert actual_data[14] == 20

    tissue_status, _ = file_writer_process.get_recording_finalization_statuses()
    assert tissue_status[0][4] is False
Example #7
def test_FileWriterProcess_hard_stop__closes_all_beta_2_files_after_stop_recording_before_all_files_are_finalized__and_files_can_be_opened_after_process_stops(
    four_board_file_writer_process, mocker
):
    expected_timestamp = "2020_02_09_190359"
    expected_barcode = GENERIC_BETA_2_START_RECORDING_COMMAND["metadata_to_copy_onto_main_file_attributes"][
        PLATE_BARCODE_UUID
    ]

    fw_process = four_board_file_writer_process["fw_process"]
    fw_process.set_beta_2_mode()
    populate_calibration_folder(fw_process)
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    tmp_dir = four_board_file_writer_process["file_dir"]

    spied_close_all_files = mocker.spy(fw_process, "close_all_files")

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        copy.deepcopy(GENERIC_BETA_2_START_RECORDING_COMMAND), from_main_queue
    )
    invoke_process_run_and_check_errors(fw_process)

    # fill files with data
    test_num_data_points = 50
    start_timepoint = GENERIC_BETA_2_START_RECORDING_COMMAND["timepoint_to_begin_recording_at"]
    test_data = np.zeros(test_num_data_points, dtype=np.uint16)
    data_packet = {
        "data_type": "magnetometer",
        "time_indices": np.arange(start_timepoint, start_timepoint + test_num_data_points, dtype=np.uint64),
        "is_first_packet_of_stream": False,
    }
    for well_idx in range(24):
        channel_dict = {
            "time_offsets": np.zeros((GENERIC_NUM_SENSORS_ENABLED, test_num_data_points), dtype=np.uint16),
            SERIAL_COMM_SENSOR_AXIS_LOOKUP_TABLE["A"]["X"]: test_data,
            SERIAL_COMM_SENSOR_AXIS_LOOKUP_TABLE["C"]["Z"]: test_data,
        }
        data_packet[well_idx] = channel_dict
    board_queues[0][0].put_nowait(data_packet)
    confirm_queue_is_eventually_of_size(board_queues[0][0], 1)

    invoke_process_run_and_check_errors(fw_process)
    confirm_queue_is_eventually_empty(board_queues[0][0])

    stop_recording_command = copy.deepcopy(GENERIC_STOP_RECORDING_COMMAND)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(stop_recording_command, from_main_queue)
    invoke_process_run_and_check_errors(fw_process)

    assert spied_close_all_files.call_count == 0  # confirm precondition
    fw_process.hard_stop()
    spied_close_all_files.assert_called_once()

    # confirm files can be opened and contain at least one piece of metadata and the correct tissue data
    for row_idx in range(4):
        for col_idx in range(6):
            well_name = WELL_DEF_24.get_well_name_from_row_and_column(row_idx, col_idx)
            with h5py.File(
                os.path.join(
                    tmp_dir,
                    f"{expected_barcode}__{expected_timestamp}",
                    f"{expected_barcode}__{expected_timestamp}__{well_name}.h5",
                ),
                "r",
            ) as this_file:
                assert (
                    str(START_RECORDING_TIME_INDEX_UUID) in this_file.attrs
                ), f"START_RECORDING_TIME_INDEX_UUID missing for Well {well_name}"
                assert get_time_index_dataset_from_file(this_file).shape == (
                    test_num_data_points,
                ), f"Incorrect time index data shape for Well {well_name}"
                assert get_time_offset_dataset_from_file(this_file).shape == (
                    GENERIC_NUM_SENSORS_ENABLED,
                    test_num_data_points,
                ), f"Incorrect time offset data shape for Well {well_name}"
                assert get_tissue_dataset_from_file(this_file).shape == (
                    GENERIC_NUM_CHANNELS_ENABLED,
                    test_num_data_points,
                ), f"Incorrect tissue data shape for Well {well_name}"
Example #8
def test_FileWriterProcess_hard_stop__closes_all_beta_1_files_after_stop_recording_before_all_files_are_finalized__and_files_can_be_opened_after_process_stops(
    four_board_file_writer_process, mocker
):
    expected_timestamp = "2020_02_09_190935"
    expected_barcode = GENERIC_BETA_1_START_RECORDING_COMMAND["metadata_to_copy_onto_main_file_attributes"][
        PLATE_BARCODE_UUID
    ]

    fw_process = four_board_file_writer_process["fw_process"]
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    tmp_dir = four_board_file_writer_process["file_dir"]

    spied_close_all_files = mocker.spy(fw_process, "close_all_files")

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        copy.deepcopy(GENERIC_BETA_1_START_RECORDING_COMMAND), from_main_queue
    )
    invoke_process_run_and_check_errors(fw_process)

    # fill files with data
    start_timepoint = GENERIC_BETA_1_START_RECORDING_COMMAND["timepoint_to_begin_recording_at"]
    test_data = np.array([[start_timepoint], [0]], dtype=np.int32)
    for i in range(24):
        tissue_data_packet = {
            "well_index": i,
            "is_reference_sensor": False,
            "data": test_data,
        }
        board_queues[0][0].put_nowait(tissue_data_packet)
    for i in range(6):
        ref_data_packet = {
            "reference_for_wells": REF_INDEX_TO_24_WELL_INDEX[i],
            "is_reference_sensor": True,
            "data": test_data,
        }
        board_queues[0][0].put_nowait(ref_data_packet)
    confirm_queue_is_eventually_of_size(board_queues[0][0], 30)

    # set to 0 to speed up test
    fw_process._minimum_iteration_duration_seconds = 0  # pylint: disable=protected-access
    invoke_process_run_and_check_errors(fw_process, num_iterations=30)
    confirm_queue_is_eventually_empty(board_queues[0][0])

    stop_recording_command = copy.deepcopy(GENERIC_STOP_RECORDING_COMMAND)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(stop_recording_command, from_main_queue)
    invoke_process_run_and_check_errors(fw_process)

    assert spied_close_all_files.call_count == 0  # confirm precondition
    fw_process.hard_stop()
    spied_close_all_files.assert_called_once()

    # confirm files can be opened and contain at least one piece of metadata and the correct tissue data
    for row_idx in range(4):
        for col_idx in range(6):
            well_name = WELL_DEF_24.get_well_name_from_row_and_column(row_idx, col_idx)
            with h5py.File(
                os.path.join(
                    tmp_dir,
                    f"{expected_barcode}__{expected_timestamp}",
                    f"{expected_barcode}__{expected_timestamp}__{well_name}.h5",
                ),
                "r",
            ) as this_file:
                assert (
                    str(START_RECORDING_TIME_INDEX_UUID) in this_file.attrs
                ), f"START_RECORDING_TIME_INDEX_UUID missing for Well {well_name}"
                assert get_tissue_dataset_from_file(this_file).shape == (
                    test_data.shape[1],
                ), f"Incorrect tissue data shape for Well {well_name}"
Example #9
def test_FileWriterProcess_process_magnetometer_data_packet__adds_a_data_packet_completely_before_the_stop_recording_timepoint__and_does_not_set_data_finalization_status_to_true(
    four_board_file_writer_process, ):
    fw_process = four_board_file_writer_process["fw_process"]
    fw_process.set_beta_2_mode()
    populate_calibration_folder(fw_process)
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    start_recording_command = copy.deepcopy(
        GENERIC_BETA_2_START_RECORDING_COMMAND)
    start_recording_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_recording_command, from_main_queue)

    stop_command = copy.deepcopy(GENERIC_STOP_RECORDING_COMMAND)

    num_data_points_1 = 26
    start_timepoint_1 = start_recording_command[
        "timepoint_to_begin_recording_at"]
    test_data_packet_1 = create_simple_data_packet(
        start_timepoint_1,
        0,
        start_recording_command["active_well_indices"],
        num_data_points_1,
    )
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet_1, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    this_file = open_the_generic_h5_file(file_dir, beta_version=2)
    # confirm some data already recorded to file
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (num_data_points_1, )
    assert actual_time_index_data[7] == start_timepoint_1 + 7
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED,
                                             num_data_points_1)
    assert actual_time_offset_data[0, 15] == 15 * 2
    assert actual_time_offset_data[1, 5] == 5 * 2
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED,
                                        num_data_points_1)
    assert actual_tissue_data[0, 15] == 15 * 3
    assert actual_tissue_data[1, 5] == 5 * 4

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        stop_command, from_main_queue)

    num_data_points_2 = 24
    start_timepoint_2 = stop_command[
        "timepoint_to_stop_recording_at"] - num_data_points_2
    test_data_packet_2 = create_simple_data_packet(
        start_timepoint_2,
        num_data_points_1,
        start_recording_command["active_well_indices"],
        num_data_points_2,
    )
    assert test_data_packet_2["time_indices"][
        -1] == stop_command["timepoint_to_stop_recording_at"] - 1
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        test_data_packet_2, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    total_num_data_points = num_data_points_1 + num_data_points_2
    # confirm additional data added to file
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (total_num_data_points, )
    assert actual_time_index_data[-1] == (
        stop_command["timepoint_to_stop_recording_at"] - 1)
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED,
                                             total_num_data_points)
    assert actual_time_offset_data[0, 11] == 11 * 2
    assert actual_time_offset_data[1, 14] == 14 * 2
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED,
                                        total_num_data_points)
    assert actual_tissue_data[0, 11] == 11 * 3
    assert actual_tissue_data[1, 14] == 14 * 4
    # TODO Tanner (5/19/21): add assertion about reference data once it is added to Beta 2 files

    tissue_status, _ = fw_process.get_recording_finalization_statuses()
    assert tissue_status[0][4] is False
    # close file to avoid issues on Windows
    this_file.close()
Example #10
def test_FileWriterProcess_process_magnetometer_data_packet__does_not_add_a_data_packet_completely_after_the_stop_recording_timepoint__and_sets_data_finalization_status_to_true(
    four_board_file_writer_process, ):
    fw_process = four_board_file_writer_process["fw_process"]
    fw_process.set_beta_2_mode()
    populate_calibration_folder(fw_process)
    board_queues = four_board_file_writer_process["board_queues"]
    from_main_queue = four_board_file_writer_process["from_main_queue"]
    file_dir = four_board_file_writer_process["file_dir"]

    start_recording_command = copy.deepcopy(
        GENERIC_BETA_2_START_RECORDING_COMMAND)
    start_recording_command["active_well_indices"] = [4]
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        start_recording_command, from_main_queue)
    num_recorded_data_points = 10
    start_timepoint = start_recording_command[
        "timepoint_to_begin_recording_at"]
    recorded_data_packet = create_simple_data_packet(
        start_timepoint,
        0,
        start_recording_command["active_well_indices"],
        num_recorded_data_points,
    )

    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        recorded_data_packet, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    this_file = open_the_generic_h5_file(file_dir, beta_version=2)
    # confirm some data already recorded to file
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (num_recorded_data_points, )
    assert actual_time_index_data[-1] == start_timepoint + num_recorded_data_points - 1
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED,
                                             num_recorded_data_points)
    assert actual_time_offset_data[0, 0] == 0
    assert actual_time_offset_data[1, -1] == (num_recorded_data_points - 1) * 2
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED,
                                        num_recorded_data_points)
    assert actual_tissue_data[0, 0] == 0
    assert actual_tissue_data[1, -1] == (num_recorded_data_points - 1) * 4

    stop_command = copy.deepcopy(GENERIC_STOP_RECORDING_COMMAND)
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        stop_command, from_main_queue)

    ignored_data_packet = create_simple_data_packet(
        stop_command["timepoint_to_stop_recording_at"],
        num_recorded_data_points,
        start_recording_command["active_well_indices"],
        15,
    )
    put_object_into_queue_and_raise_error_if_eventually_still_empty(
        ignored_data_packet, board_queues[0][0])
    invoke_process_run_and_check_errors(fw_process)

    # confirm no additional data added to file
    actual_time_index_data = get_time_index_dataset_from_file(this_file)
    assert actual_time_index_data.shape == (num_recorded_data_points, )
    actual_time_offset_data = get_time_offset_dataset_from_file(this_file)
    assert actual_time_offset_data.shape == (GENERIC_NUM_SENSORS_ENABLED,
                                             num_recorded_data_points)
    actual_tissue_data = get_tissue_dataset_from_file(this_file)
    assert actual_tissue_data.shape == (GENERIC_NUM_CHANNELS_ENABLED,
                                        num_recorded_data_points)
    # TODO Tanner (5/19/21): add assertion about reference data once it is added to Beta 2 files

    tissue_status, _ = fw_process.get_recording_finalization_statuses()
    assert tissue_status[0][4] is True
    # close file to avoid issues on Windows
    this_file.close()
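
The open_the_generic_h5_file and get_*_dataset_from_file helpers used throughout these examples come from the suite's test utilities and are not shown here. A minimal sketch of what the dataset accessors could look like, assuming dataset names of time_indices, time_offsets, and tissue_sensor_readings (the actual names are defined by the recorded file format and may differ):

import h5py

# Hypothetical versions of the dataset accessors used in the tests above; the dataset
# names below are assumptions about the recorded H5 layout, not the confirmed schema.
def get_time_index_dataset_from_file(h5_file):
    return h5_file["time_indices"]


def get_time_offset_dataset_from_file(h5_file):
    return h5_file["time_offsets"]


def get_tissue_dataset_from_file(h5_file):
    return h5_file["tissue_sensor_readings"]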