Example #1
def check_positive_data_length(
        tdms_operator: nptdms.TdmsFile) -> either.Either:
    """Checks whether the data length is positive"""
    max_length = tdms_helpers.get_maximum_array_size(tdms_operator)
    if max_length <= 0:
        return either.Left(ErrorCode.DATALENGTH_NONPOSITIVE)
    return either.Right(tdms_operator)
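
All four examples lean on tdms_helpers.get_maximum_array_size, whose definition is not shown. A plausible sketch using the public npTDMS API (an assumption, not the project's actual implementation) would report the length of the longest channel in the file:

import nptdms

def get_maximum_array_size(tdms_file: nptdms.TdmsFile) -> int:
    # Assumed behavior: the longest channel determines the usable data length.
    # Returns 0 for a file without channels, which Example #1 treats as an error.
    return max(
        (len(channel)
         for group in tdms_file.groups()
         for channel in group.channels()),
        default=0,
    )
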
Example #2
def test_correct_array_size_works_for_example(tmpdir):
    meta = source.MetaData(
        chunk_size=6,
        recurrence_size=3,  # <- 2 would be correct
        recurrence_distance=3,
        consistency_sample_size=10,
    )
    example_source = source.SourceFile.read_from_path(
        tdms_path="tests/assets/example_file.tdms",
        meta=meta,
    )
    res = tdms_helpers.get_maximum_array_size(example_source.tdms_operator)
    assert res == 19
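
The tmpdir fixture is accepted but never used. A self-contained variant could generate the input on the fly with npTDMS's writer instead of relying on the checked-in asset (a sketch, assuming get_maximum_array_size reports channel lengths; TdmsWriter and ChannelObject come from npTDMS):

import numpy as np
import nptdms
from nptdms import ChannelObject, TdmsWriter

def test_array_size_of_generated_file(tmpdir):
    # Write a single 19-sample channel into a temporary TDMS file.
    path = str(tmpdir.join("generated.tdms"))
    with TdmsWriter(path) as writer:
        writer.write_segment([ChannelObject("group", "channel", np.arange(19))])
    tdms_file = nptdms.TdmsFile.read(path)
    assert tdms_helpers.get_maximum_array_size(tdms_file) == 19
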
Example #3
def prepare_data_correction(
        source_file: source.SourceFile) -> List[Tuple[int, int]]:
    """Prepares all parameters needed for the data correction process.

    Arguments:
        source_file: Container object for the TDMS file with parameters.

    Returns:
        List of (offset, length) tuples that point to valid data slices.
    """
    maximum_size = tdms_helpers.get_maximum_array_size(
        source_file.tdms_operator)
    index_ranges = calculate_index_ranges_to_preserve(
        source_file.meta.chunk_size,
        source_file.meta.recurrence_size,
        maximum_size,
    )
    return index_ranges
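
calculate_index_ranges_to_preserve is not shown here. Its behavior can be inferred as the complement of calculate_drop_indices in Example #4: keep chunk_size samples, skip recurrence_size duplicates, repeat, and clip the final chunk at the end of the data. A hypothetical reconstruction (an assumption, not the project's code):

from typing import List, Tuple

def calculate_index_ranges_to_preserve(
        chunk_size: int, recurrence_size: int,
        maximum_size: int) -> List[Tuple[int, int]]:
    # Keep every chunk_size-sample block, stepping over the duplicate block
    # between consecutive chunks; the final chunk may be truncated.
    stride = chunk_size + recurrence_size
    return [(offset, min(chunk_size, maximum_size - offset))
            for offset in range(0, maximum_size, stride)]

For chunk_size=6, recurrence_size=3 and a 19-sample file this yields [(0, 6), (9, 6), (18, 1)], which interleaves exactly with the drop indices [(6, 3), (15, 3)] produced in Example #4.
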
Example #4
def calculate_drop_indices(
        source_file: source.SourceFile) -> List[Tuple[int, int]]:
    """Calculates the index positions of duplicates in the file and returns
    them as (offset, length) tuples.

    This function is the counterpart of
    fix.calculate_index_ranges_to_preserve and is only used in error handling.

    Arguments:
        source_file: Container object for the TDMS file with parameters.

    Returns:
        List of (offset, length) tuples that point to invalid data slices.
    """
    len_data = tdms_helpers.get_maximum_array_size(source_file.tdms_operator)
    chunk_size = source_file.meta.chunk_size
    recurrence_size = source_file.meta.recurrence_size

    # A duplicate block of recurrence_size samples follows every chunk_size
    # samples of valid data.
    offsets = np.arange(chunk_size, len_data, chunk_size + recurrence_size)
    if len(offsets) == 0:
        # Guard: data shorter than one chunk contains no duplicates to drop.
        return []
    lengths = [recurrence_size] * (len(offsets) - 1)
    # The last duplicate block may be truncated by the end of the data.
    lengths.append(min(recurrence_size, len_data - offsets[-1]))

    return list(zip(offsets, lengths))
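
For concreteness, a hypothetical run of the same arithmetic with the values from Example #2 (chunk_size=6, recurrence_size=3) and a 19-sample file:

import numpy as np

chunk_size, recurrence_size, len_data = 6, 3, 19
# Duplicate blocks start at 6 and 15 (every chunk_size + recurrence_size samples).
offsets = np.arange(chunk_size, len_data, chunk_size + recurrence_size)
lengths = [recurrence_size] * (len(offsets) - 1)
lengths.append(min(recurrence_size, len_data - offsets[-1]))
print(list(zip(offsets.tolist(), lengths)))  # [(6, 3), (15, 3)]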