Beispiel #1
0
def test_left_identity(i: int):
    """Checks whether the return/pure function is a left-identity for bind.

    For every combination of Either constructor (Left/Right) and basic
    function, a monadic function ``x -> constructor(f(x))`` is built and
    bind's identity laws are asserted against it.
    """

    either_instances = [either.Left, either.Right]
    basic_test_functions = [lambda x: 2 * x, lambda x: x + 1]
    # Bind e and f as default arguments: a plain closure would capture the
    # comprehension variables late, so every lambda would end up using the
    # *last* (e, f) pair and only one of the four intended monadic
    # functions would actually be tested.
    monadic_test_functions = [
        lambda x, e=e, f=f: e(f(x))
        for e, f in product(either_instances, basic_test_functions)
    ]

    for f in monadic_test_functions:
        assert either.Left(i).bind(f) == either.Left(i)
        assert either.Right(i).bind(f) == f(i)
Beispiel #2
0
def check_positive_data_length(
        tdms_operator: nptdms.TdmsFile) -> either.Either:
    """Checks whether the data length is positive"""
    # Accept the operator only when its largest array is non-empty.
    if tdms_helpers.get_maximum_array_size(tdms_operator) > 0:
        return either.Right(tdms_operator)
    return either.Left(ErrorCode.DATALENGTH_NONPOSITIVE)
def test_check_chunksize_positive():
    """A negative chunk size is rejected; zero and positive sizes pass."""

    def make_meta(chunk_size):
        # Only chunk_size varies in this test; other fields stay fixed.
        return source.MetaData(
            chunk_size=chunk_size,
            recurrence_size=10,
            recurrence_distance=1,
            consistency_sample_size=10,
        )

    invalid_meta = make_meta(-12)
    assert error_handling.check_chunksize_positive(
        meta=invalid_meta
    ) == either.Left(error_handling.ErrorCode.CHUNKSIZE_NONPOSITIVE)

    for size in (0, 12):
        valid_meta = make_meta(size)
        assert error_handling.check_chunksize_positive(
            meta=valid_meta
        ) == either.Right(valid_meta)
def test_check_recurrence_size_nonnegative():
    """A negative recurrence size is rejected; zero and positive pass."""

    def make_meta(recurrence_size):
        # Only recurrence_size varies; other fields stay fixed.
        return source.MetaData(
            chunk_size=12,
            recurrence_size=recurrence_size,
            recurrence_distance=1,
            consistency_sample_size=10,
        )

    invalid_meta = make_meta(-10)
    assert error_handling.check_recurrence_size_nonnegative(
        meta=invalid_meta
    ) == either.Left(error_handling.ErrorCode.RECURRENCESIZE_NEGATIVE)

    for size in (10, 0):
        valid_meta = make_meta(size)
        assert error_handling.check_recurrence_size_nonnegative(
            meta=valid_meta
        ) == either.Right(valid_meta)
Beispiel #5
0
def check_export_path(path: pathlib.Path, ) -> either.Either:
    """It should not be possible to choose a nonexistent folder in the export
    path. This function checks if this is satisfied.
    Return type is Either[error_handling.ErrorCode, pathlib.Path]"""

    # Valid as soon as the containing folder exists; the file itself may
    # not exist yet.
    parent_exists = path.parent.exists()
    if parent_exists:
        return either.Right(path)
    return either.Left(error_handling.ErrorCode.EXPORTPATH_NONEXISTENT)
Beispiel #6
0
def test_concatenation():
    """Chaining binds via | applies each step on Right and passes a Left
    through unchanged."""
    double = lambda x: either.Right(2 * x)
    increment = lambda x: either.Right(x + 1)

    res = either.Right(1) | double | increment
    assert res == either.Right(3)

    left_value = either.Left(1)
    res = left_value | double | increment
    assert res == left_value
Beispiel #7
0
def check_input_path(path: pathlib.Path) -> either.Either:
    """Checks if file at given path is a tdms file or a folder.

    Returns Either[ErrorCode, pathlib.Path]: Right(path) if the path is a
    readable TDMS file or a directory, Left(PATH_NOT_TDMS_OR_DIR)
    otherwise.
    """
    try:
        # Open purely as a validity probe; use a context manager so the
        # file handle is closed again (the original leaked it on the
        # success path).
        with nptdms.TdmsFile.open(file=path):
            pass
        return either.Right(path)
    except (FileNotFoundError, IsADirectoryError):
        # Not a readable TDMS file -- a directory is still acceptable.
        if not path.is_dir():
            return either.Left(ErrorCode.PATH_NOT_TDMS_OR_DIR)
        return either.Right(path)
Beispiel #8
0
def check_for_same_length(tdms_operator: nptdms.TdmsFile) -> either.Either:
    """Checks whether all relevant channels of the Tdms file have the same
    length.

    Channels of length zero are ignored. Returns
    Either[ErrorCode, nptdms.TdmsFile]: Left(LENGTHERROR) unless exactly
    one distinct non-zero channel length exists.
    """
    # Collect the distinct lengths of all non-empty channels directly in
    # a set. The original built np.array from nested lists whose lengths
    # can differ per group, which raises for ragged input on NumPy >= 1.24
    # (and an object-array fallback would put unhashable lists in the set).
    lengths = {
        len(channel)
        for group in tdms_operator.groups()
        for channel in group.channels()
        if len(channel) > 0
    }
    # Zero distinct lengths (no data at all) is also treated as an error.
    if len(lengths) != 1:
        return either.Left(ErrorCode.LENGTHERROR)
    return either.Right(tdms_operator)
Beispiel #9
0
def combine_with_tdms(
    tdms_path: pathlib.Path, ) -> Callable[[source.MetaData], either.Either]:
    """Returns a function which combines given MetaData with the TdmsFile located at tdms_path to
    a SourceFile object after
    consistency checks of this file. The return type of the returned function is
    Either[error_handling.ErrorCode,source.SourceFile]"""
    # Load the file once up front and run the consistency checks on it.
    loaded = error_handling.load_tdms_file(path=tdms_path)
    checked = loaded | error_handling.check_tdms

    if isinstance(checked, either.Left):
        # Loading/checking failed: every later call fails the same way.
        def _fail(_: source.MetaData) -> either.Either:
            return either.Left(
                error_handling.ErrorCode.TDMSPATH_NONEXISTENT)

        return _fail

    def _build(meta: source.MetaData) -> either.Either:
        # Combine the validated operator with the supplied meta data.
        source_file = source.SourceFile(tdms_operator=checked._value,
                                        meta=meta)
        return either.Right(source_file)

    return _build
def test_check_recurrence_size_smaller_chunk_size():
    """A recurrence size greater than the chunk size is rejected,
    otherwise the meta data passes."""

    def make_meta(chunk_size, recurrence_size):
        # Only the two compared fields vary; the rest stays fixed.
        return source.MetaData(
            chunk_size=chunk_size,
            recurrence_size=recurrence_size,
            recurrence_distance=1,
            consistency_sample_size=10,
        )

    invalid_meta = make_meta(10, 12)
    assert error_handling.check_recurrence_size_smaller_chunk_size(
        meta=invalid_meta
    ) == either.Left(error_handling.ErrorCode.RECURRENCESIZE_GREATER_CHUNKSIZE)

    valid_meta = make_meta(12, 10)
    assert error_handling.check_recurrence_size_smaller_chunk_size(
        meta=valid_meta
    ) == either.Right(valid_meta)
Beispiel #11
0
def load_tdms_file(path: pathlib.Path) -> either.Either:
    """Tries to load the tdms file located at path and returns Either[ErrorCode,np.tdms.TdmsFile]"""
    try:
        tdms_file = nptdms.TdmsFile.open(file=path)
    except FileNotFoundError:
        return either.Left(ErrorCode.TDMSPATH_NONEXISTENT)
    return either.Right(tdms_file)
Beispiel #12
0
def test_behavior_left():
    """bind on a Left is a no-op: the Left passes through unchanged."""
    doubled = lambda x: either.Right(2 * x)
    left_value = either.Left(1)
    assert left_value.bind(doubled) == left_value
Beispiel #13
0
def check_dir_empty(dir_path: pathlib.Path) -> either.Either:
    """Checks if directory at given path is empty
    """
    # any() stops at the first entry, so a large directory is not scanned.
    has_entries = any(dir_path.iterdir())
    if has_entries:
        return either.Right(dir_path)
    return either.Left(ErrorCode.DIRPATH_EMPTY)
Beispiel #14
0
def check_for_correct_repetition(
    source_file: source.SourceFile, ) -> either.Either:
    """Checks whether the meta data about the occurrence of repetitions is
    valid for the Tdms file, i.e. whether repetitions really occur at the
    desired places.

    A random sample of candidate duplicate ranges (taken from
    calculate_drop_indices) is compared with the data located
    recurrence_distance samples earlier. Returns
    Either[ErrorCode, source.SourceFile]: Left(PARAMETERERROR) when a
    sampled range does not match its origin (or no boundary check ever
    succeeds), Right(source_file) otherwise.

    NOTE(review): the sampling uses np.random.choice without a fixed
    seed, so repeated calls on borderline data may disagree -- confirm
    whether deterministic behavior is wanted.
    """
    # generate random test samples
    delete_ranges = np.array(calculate_drop_indices(source_file))
    # never sample more ranges than actually exist
    number_samples_to_test = min(source_file.meta.consistency_sample_size,
                                 len(delete_ranges))
    # np.random.choice does only take 1d arrays, so we need this
    # workaround by choosing 1d indices in range with len(delete_indices)
    chosen_deletes = np.random.choice(len(delete_ranges),
                                      number_samples_to_test,
                                      replace=False)
    delete_ranges = delete_ranges[chosen_deletes]

    # prepare all tdms channels that contain data (zero-length channels
    # are skipped)
    all_channels = list(
        itertools.chain.from_iterable(
            [[c for c in group.channels() if len(c) > 0]
             for group in source_file.tdms_operator.groups()]))

    # test data of each test sample

    # Starts False so an empty sample set yields Left(PARAMETERERROR).
    meta_data_suitable = False

    for (offset, length) in delete_ranges:
        # calculate indices of the duplicates origin
        origin_offset = offset - source_file.meta.recurrence_distance

        # extract origin and duplicate data and compare
        duplicate_data = [
            old_channel.read_data(offset=offset, length=length)
            for old_channel in all_channels
        ]
        origin_data = [
            old_channel.read_data(offset=origin_offset, length=length)
            for old_channel in all_channels
        ]
        # any mismatch disqualifies the meta data immediately
        if not np.array_equal(duplicate_data, origin_data):
            meta_data_suitable = False
            break

        # extract data points around the data above
        # NOTE(review): offset - 1 / origin_offset - 1 can be negative for
        # a range starting at index 0; the behavior of read_data for
        # negative offsets is not visible here -- confirm against nptdms.
        duplicate_front_values = [
            old_channel.read_data(offset=offset - 1, length=1)[0]
            for old_channel in all_channels
        ]
        duplicate_rear_values = [
            old_channel.read_data(offset=offset + length, length=1)[0]
            for old_channel in all_channels
        ]

        origin_front_values = [
            old_channel.read_data(offset=origin_offset - 1, length=1)[0]
            for old_channel in all_channels
        ]
        origin_rear_values = [
            old_channel.read_data(offset=origin_offset + length, length=1)[0]
            for old_channel in all_channels
        ]
        # check if they are not part of duplication: at least one set of
        # neighbouring values must differ from the origin's, otherwise the
        # duplicated region extends beyond the declared range.
        if not (np.array_equal(duplicate_front_values, origin_front_values)
                or np.array_equal(duplicate_rear_values, origin_rear_values)):
            meta_data_suitable = True

    if not meta_data_suitable:
        return either.Left(ErrorCode.PARAMETERERROR)
    return either.Right(source_file)
Beispiel #15
0
def check_chunksize_positive(meta: source.MetaData) -> either.Either:
    """Rejects a negative chunk size.

    NOTE(review): despite the function name and the NONPOSITIVE error
    code, a chunk size of exactly 0 is accepted (the original compares
    with < 0 and the tests in this file expect 0 to pass) -- confirm
    whether 0 should really be valid.
    """
    if meta.chunk_size >= 0:
        return either.Right(meta)
    return either.Left(ErrorCode.CHUNKSIZE_NONPOSITIVE)
Beispiel #16
0
def check_recurrence_size_nonnegative(meta: source.MetaData) -> either.Either:
    """Rejects a negative recurrence size; zero is allowed."""
    if meta.recurrence_size >= 0:
        return either.Right(meta)
    return either.Left(ErrorCode.RECURRENCESIZE_NEGATIVE)
Beispiel #17
0
def check_recurrence_size_smaller_chunk_size(
    meta: source.MetaData, ) -> either.Either:
    """Rejects meta data whose recurrence size exceeds its chunk size."""
    if meta.recurrence_size <= meta.chunk_size:
        return either.Right(meta)
    return either.Left(ErrorCode.RECURRENCESIZE_GREATER_CHUNKSIZE)