Example #1
        def pick_orientation(scaled, timestamps, orientation):
            """ Sends picks on a single orientation, either 'n', 'e', or 'z'. """
            # ---------------------------------------------------------------
            # CREATE AGENTS AND STREAMS
            # ---------------------------------------------------------------

            # 1. DECIMATE SCALED DATA.
            # Window of size DECIMATION is decimated to its average.
            decimated = Stream('decimated')
            map_window(lambda v: sum(v) / float(len(v)), scaled, decimated,
                       window_size=self.decimation, step_size=self.decimation)

            # 2. DECIMATE TIMESTAMPS.
            # Window of size DECIMATION is decimated to its last value.
            decimated_timestamps = Stream('decimated_timestamps')
            map_window(lambda window: window[-1],
                       timestamps, decimated_timestamps,
                       window_size=self.decimation, step_size=self.decimation)

            # 3. DEMEAN (subtract mean from) DECIMATED STREAM.
            # Subtract mean of window from the window's last value.
            # Move sliding window forward by 1 step.
            demeaned = Stream('demeaned', initial_value=[0.0] * (LTA_count - 1))
            map_window(lambda window: window[-1] - sum(window) / float(len(window)),
                       decimated, demeaned,
                       window_size=LTA_count, step_size=1)

            # 4. MERGE TIMESTAMPS WITH DEMEANED ACCELERATIONS.
            # Merges decimated_timestamps and demeaned to get timestamped_data.
            timestamped_data = Stream('timestamped_data')
            zip_streams(in_streams=[decimated_timestamps, demeaned], out_stream=timestamped_data)

            # 5. DETECT PICKS.
            # Output a pick if the value part of the time_value (t_v) exceeds threshold.
            picks = Stream('picks')
            filter_element(lambda t_v: abs(t_v[1]) > self.pick_threshold, timestamped_data, picks)

            # 6. QUENCH PICKS.
            # An element is a (timestamp, value).
            # Start a new quench when timestamp > QUENCH_PERIOD + last_quench.
            # Update the last quench when a new quench is initiated.
            # Initially the last_quench (i.e. state) is 0.
            quenched_picks = Stream('quenched_picks')

            # f is the filtering function
            def f(timestamped_value, last_quench, QUENCH_PERIOD):
                timestamp, value = timestamped_value
                new_quench = timestamp > QUENCH_PERIOD + last_quench
                last_quench = timestamp if new_quench else last_quench
                # return filter condition (new_quench) and next state (last_quench)
                return new_quench, last_quench

            filter_element(f, picks, quenched_picks, state=0, QUENCH_PERIOD=2)

            # 7. SEND QUENCHED PICKS.
            self.send_event(quenched_picks)
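
The quench step above can be exercised on its own. The following is a minimal, standalone sketch of that filter, assuming the Stream, filter_element, run and recent_values helpers imported in the later examples and the keep-when-True filter semantics shown by the asserts of Example #5; the sample timestamps and the QUENCH_PERIOD value are illustrative only.

picks_demo = Stream('picks_demo')
quenched_demo = Stream('quenched_demo')

def quench(timestamped_value, last_quench, QUENCH_PERIOD):
    # Pass a pick only when its timestamp exceeds last_quench + QUENCH_PERIOD,
    # and restart the quench period at that timestamp.
    timestamp, value = timestamped_value
    new_quench = timestamp > QUENCH_PERIOD + last_quench
    last_quench = timestamp if new_quench else last_quench
    return new_quench, last_quench

filter_element(quench, picks_demo, quenched_demo, state=0, QUENCH_PERIOD=2)

picks_demo.extend([(1, 0.9), (2, 1.1), (3, 1.4), (6, 2.0), (7, 0.8)])
run()
# Under the assumptions above, only the picks at timestamps 3 and 6 pass:
# recent_values(quenched_demo) == [(3, 1.4), (6, 2.0)]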
Example #2
    def compute_func(in_streams, out_streams):
        def f(x):
            return x < 5

        check_list = list(filter(f, source_list))  # source_list is defined outside this excerpt
        t = Stream()
        filter_element(func=f, in_stream=in_streams[0], out_stream=t)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(
            in_stream=t,
            filename='single_process_single_source_filter_example_1.dat')
    def compute_func(in_streams, out_streams):
        def less_than_n(v, n):
            return v <= n, n + 1

        check_list = [1, 3, 5, 7, 9]
        t = Stream()
        filter_element(func=less_than_n,
                       in_stream=in_streams[0],
                       out_stream=t,
                       state=0)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(in_stream=t, filename='filter_element_example_1.dat')
from stream import Stream, _no_value, _multivalue
from check_agent_parameter_types import *
from recent_values import recent_values
from op import filter_element

scheduler = Stream.scheduler
# In the following, x is a stream that must be declared
# before the functions are called.
x = Stream('x')
#----------------------------------------------------------------
# Filter to only have odd numbers
#----------------------------------------------------------------
def is_odd_number(v):
    return v % 2
odd = Stream()
filter_element(func=is_odd_number, in_stream=x, out_stream=odd)
# Example: If x = [0, 1, 2, ...] then odd is [1, 3, 5, ...]

#----------------------------------------------------------------
# Filter to only have even numbers
#----------------------------------------------------------------
def is_even_number(v):
    return not v % 2
even = Stream()
filter_element(func=is_even_number, in_stream=x, out_stream=even)
# Example: If x = [0, 1, 2, ...] then even is [0, 2, 4, ...]

#----------------------------------------------------------------
# Filter to only have positive numbers
#----------------------------------------------------------------
def positive(v): return v > 0
pos = Stream()
filter_element(func=positive, in_stream=x, out_stream=pos)
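
A small driver for the three filters above, following the pattern of Example #5 below; it assumes run() and recent_values() are available as they are in that example, and the expected values are shown only as comments.

x.extend(list(range(6)))
run()
print(recent_values(odd))   # expected: [1, 3, 5]
print(recent_values(even))  # expected: [0, 2, 4]
print(recent_values(pos))   # expected: [1, 2, 3, 4, 5]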
Example #5
def examples_filter_element():
    x = Stream('x')
    #----------------------------------------------------------------
    # Filter to only have even numbers
    #----------------------------------------------------------------
    even = Stream()
    filter_element(func=lambda v: not v % 2, in_stream=x, out_stream=even)
    # Example: If x = [0, 1, 2, 3, ...] then even is [0, 2, 4, ...]

    #----------------------------------------------------------------
    # Filter to only have odd numbers
    #----------------------------------------------------------------
    odd = Stream()
    filter_element(func=lambda v: v % 2, in_stream=x, out_stream=odd)

    #----------------------------------------------------------------
    # Filter to only have negative numbers
    #----------------------------------------------------------------
    neg = Stream('negative')
    filter_element(func=lambda v: v < 0, in_stream=x, out_stream=neg)

    #----------------------------------------------------------------
    # Filter to only have non-negative numbers
    #----------------------------------------------------------------
    non_neg = Stream('non_negative')
    filter_element(func=lambda v: v >= 0, in_stream=x, out_stream=non_neg)

    #----------------------------------------------------------------
    # filter_element with state and no additional arguments
    #----------------------------------------------------------------
    def less_than_n(v, state):
        next_output_element = (v <= state)
        next_state = state + 1
        return next_output_element, next_state

    y = Stream('y')
    less = Stream()
    filter_element(func=less_than_n, in_stream=y, out_stream=less, state=0)

    # State on the j-th step is j.
    # less_than_n(v, state) returns (v <= j, j + 1) on the j-th step,
    # so the stream less contains exactly those elements y[j] with y[j] <= j.
    # If y is [1, 5, 0, 2, 6, 3] then the states are [0, 1, 2, 3, 4, 5];
    # not(y[0] <= 0), not(y[1] <= 1), y[2] <= 2, y[3] <= 3, not(y[4] <= 4),
    # y[5] <= 5, so the outputs of less_than_n are
    # [(False, 1), (False, 2), (True, 3), (True, 4), (False, 5), (True, 6)]
    # and the output stream contains y[2], y[3], y[5], i.e. [0, 2, 3].

    #----------------------------------------------------------------
    # filter_element with state and with additional keyword arguments
    #----------------------------------------------------------------
    # The keyword argument is addend.
    def less_than_n_plus_addend(v, state, addend):
        # return pair: boolean filter, next state
        return v <= state + addend, state + 1

    z = Stream('z')
    less_addend = Stream()
    filter_element(func=less_than_n_plus_addend,
                   in_stream=z,
                   out_stream=less_addend,
                   state=0,
                   addend=3)

    # State on the j-th step is j.
    # Stream less_addend contains z[j] if and only if z[j] <= j + 3.
    # For example, if z = [2, 3, 3, 4, 10, 15, 7] then the
    # output stream is [2, 3, 3, 4, 7].

    #----------------------------------------------------------------
    # Filter to only have numbers above the threshold
    #----------------------------------------------------------------
    def threshold(v, threshold):
        return v > threshold

    above_threshold = Stream('above threshold')
    filter_element(func=threshold,
                   in_stream=x,
                   out_stream=above_threshold,
                   threshold=0)

    # Put data into input streams and run.
    DATA_x = list(range(-5, 5, 1))
    x.extend(DATA_x)
    DATA_y = [1, 5, 0, 2, 6, 3]
    y.extend(DATA_y)
    DATA_z = [2, 3, 3, 4, 10, 15, 7]
    z.extend(DATA_z)

    run()

    # Inspect output
    assert recent_values(even) == [-4, -2, 0, 2, 4]
    assert recent_values(odd) == [-5, -3, -1, 1, 3]
    assert recent_values(non_neg) == [0, 1, 2, 3, 4]
    assert recent_values(neg) == [-5, -4, -3, -2, -1]
    assert recent_values(less) == [0, 2, 3]
    assert recent_values(less_addend) == [2, 3, 3, 4, 7]
    assert recent_values(above_threshold) == [1, 2, 3, 4]
Example #6
def f(in_streams, out_streams):
    """
    Compute Function for Sensor Reader
    Parameters
    ----------
    in_streams: list of input Streams - acceleration reordered to conform to the SAF standard of [N, E, Z]
        in_streams[0] - acceleration N
        in_streams[1] - acceleration E
        in_streams[2] - acceleration Z
        in_streams[3] - timestamp
    out_streams: list of Streams
        out_streams[0] - acceleration N (averaged and picked)
        out_streams[1] - acceleration E (averaged and picked)
        out_streams[2] - acceleration Z (averaged and picked)
        out_streams[3] - timestamp
    """
    n_acc = len(in_streams) - 1

    # DECLARE STREAMS

    scaled_acc = [Stream('scaled_acc_' + str(i)) for i in range(n_acc)]
    inverted_acc = Stream('inverted_acc')  # stream for inverted acceleration E
    averaged_acc = [Stream('averaged_acc_' + str(i)) for i in range(n_acc)]
    acc_timestamp = Stream('acc_timestamp')  # timestamp corresponding to the averaged_acc
    acc_merged = Stream('acc_merged')  # acceleration stream merged with timestamp stream
    acc_picked = Stream('acc_picked')  # stream of acc data picked according to timestamp

    # CREATE AGENTS

    # 1. SCALE ACCELERATION
    # our special order CSN Phidgets are scaled to +/- 2g instead of +/- 6g
    def scale_g(v):
        return PHIDGETS_ACCELERATION_TO_G * v

    for i in range(n_acc):
        map_element(func=scale_g,
                    in_stream=in_streams[i],
                    out_stream=scaled_acc[i])

    # TODO: CHECK AND REPORT MISSING SAMPLES
    # if self.last_phidgets_timestamp:
    #     sample_increment = int(
    #         round((phidgets_timestamp - self.last_phidgets_timestamp) / PHIDGETS_NOMINAL_DATA_INTERVAL))
    #     if sample_increment > 4 * self.decimation:
    #         logging.warn('Missing >3 samples: last sample %s current sample %s missing samples %s', \
    #                      self.last_phidgets_timestamp, phidgets_timestamp, sample_increment)
    #     elif sample_increment == 0:
    #         logging.warn('Excess samples: last sample %s current sample %s equiv samples %s', \
    #                      self.last_phidgets_timestamp, phidgets_timestamp, sample_increment)

    # 2. INVERT ACCELERATION E
    # invert channel 1 (E-W) - this results in +1g being reported when the sensor is resting on its E side
    def invert_channel(v):
        return -1 * v

    map_element(func=invert_channel,
                in_stream=scaled_acc[1],
                out_stream=inverted_acc)

    # 3. AVERAGE WINDOW
    def average_samples(window):
        return sum(window) / float(len(window))

    # average for inverted channel
    map_window(func=average_samples,
               in_stream=inverted_acc,
               out_stream=averaged_acc[1],
               window_size=PHIDGETS_DECIMATION,
               step_size=PHIDGETS_DECIMATION)

    for i in [0, 2]:
        map_window(func=average_samples,
                   in_stream=scaled_acc[i],
                   out_stream=averaged_acc[i],
                   window_size=PHIDGETS_DECIMATION,
                   step_size=PHIDGETS_DECIMATION)

    # 4. OBTAIN CORRESPONDING TIMESTAMP
    def get_timestamp(window):
        return window[-1]

    map_window(func=get_timestamp,
               in_stream=in_streams[3],
               out_stream=acc_timestamp,
               window_size=PHIDGETS_DECIMATION,
               step_size=PHIDGETS_DECIMATION)

    # 5. ZIP ACCELERATION AND TIMESTAMP STREAMS
    zip_stream(in_streams=averaged_acc + [acc_timestamp],
               out_stream=acc_merged)

    # 6. QUENCH SENSOR READING
    def timestamp_picker(v, state):
        if v[3] - state > PICKER_INTERVAL:  # generate output
            return False, v[3]
        else:
            return True, state

    filter_element(func=timestamp_picker,
                   in_stream=acc_merged,
                   out_stream=acc_picked,
                   state=0)

    # 7. UNZIP STREAM - to pass streams to other processes
    unzip(in_stream=acc_picked, out_streams=out_streams)
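
A minimal, single-process driver sketch for f. It assumes the Stream and run helpers used in the other examples, that the PHIDGETS_* and PICKER_INTERVAL constants are defined at module level, and that the map_element, map_window, zip_stream, unzip and filter_element agents are importable; the stream names and sample values below are illustrative only.

ins = [Stream(name) for name in ('acc_n', 'acc_e', 'acc_z', 'acc_t')]
outs = [Stream('picked_' + str(i)) for i in range(4)]
f(in_streams=ins, out_streams=outs)

# Feed a few synthetic samples and run the agents.
for acc in ins[:3]:
    acc.extend([0.0, 0.5, 1.0, 1.5, 2.0, 2.5])
ins[3].extend([0.01 * i for i in range(6)])
run()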
Example #7
def g(in_streams, out_streams):
    """
    Compute Function for Picker
    Parameters
    ----------
    in_streams: list of input Streams passed from sensor reader process (f)
        in_streams[0] - acceleration N
        in_streams[1] - acceleration E
        in_streams[2] - acceleration Z
        in_streams[3] - timestamp
    """

    # DECLARE STREAMS

    adjusted_acc = [
        Stream('adjusted_acc_{}'.format(i))
        for i in range(len(in_streams) - 1)
    ]
    adjusted_timestamp = Stream('adjusted_timestamp')
    merged_acc = Stream('acc_merged')
    filtered_acc = Stream('filtered_acc')
    quenched_acc = Stream('quenched_acc')

    # DEFINE AGENTS

    # 1. ADJUST LTA - subtract long-term-average from sample data
    def adjust_lta(window):
        return abs(window[-1] - sum(window) / len(window))

    for i in range(len(in_streams) - 1):
        map_window(func=adjust_lta,
                   in_stream=in_streams[i],
                   out_stream=adjusted_acc[i],
                   window_size=LTA_COUNT,
                   step_size=1)

    # 2. ADJUST TIMESTAMP - obtain timestamp corresponding to each window
    def adjust_timestamp(window):
        return window[-1]

    map_window(func=adjust_timestamp,
               in_stream=in_streams[-1],
               out_stream=adjusted_timestamp,
               window_size=LTA_COUNT,
               step_size=1)

    # 3. ZIP STREAM - zip acceleration and timestamp streams
    zip_stream(in_streams=adjusted_acc + [adjusted_timestamp],
               out_stream=merged_acc)

    # 4. DETECT ANOMALY - filter out small magnitude to report only large acceleration
    def detect_anomaly(v):
        return any(map(lambda x: x > PICKER_THRESHOLD, v[:-1]))

    filter_element(func=detect_anomaly,
                   in_stream=merged_acc,
                   out_stream=filtered_acc)

    # 5. QUENCH PICKER
    def quench_picker(v, state):
        timestamp = v[3]
        if timestamp - state < MINIMUM_REPICK_INTERVAL_SECONDS:
            return _no_value, state
        else:
            state = timestamp
            return v, state

    map_element(func=quench_picker,
                in_stream=filtered_acc,
                out_stream=quenched_acc,
                state=0)

    # 6. STREAM RESULTS TO FILE - for test purposes
    stream_to_file(quenched_acc, './phidget_data.txt')
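
A corresponding driver sketch for g, analogous to the one after f. It assumes LTA_COUNT, PICKER_THRESHOLD and MINIMUM_REPICK_INTERVAL_SECONDS are defined at module level and that the agents used by g are importable; the spike values are illustrative, chosen large enough that detect_anomaly can fire.

ins = [Stream(name) for name in ('pick_n', 'pick_e', 'pick_z', 'pick_t')]
g(in_streams=ins, out_streams=[])

for acc in ins[:3]:
    acc.extend([0.0] * 10 + [5.0, 5.0])
ins[3].extend([0.1 * i for i in range(12)])
run()
# Any quenched picks are written by g to ./phidget_data.txt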
Example #8
def test_pick_orientation_with_verbose_output():
    PHIDGETS_ACCELERATION_TO_G = 1.0 / 3.0
    DECIMATION = 2
    LTA_count = 2
    PICK_THRESHOLD = 0.5

    # ---------------------------------------------------------------
    # Input streams
    # scaled is the stream of scaled acceleration data along one axis.
    # timestamps is the stream of timestamps.
    scaled = Stream('scaled')
    timestamps = Stream('timestamps')

    # Decimate acceleration.
    # Window of size DECIMATION is decimated to its average.
    # Input = scaled
    # Output = decimated.
    decimated = Stream('decimated')
    map_window(lambda v: sum(v) / float(len(v)),
               scaled,
               decimated,
               window_size=DECIMATION,
               step_size=DECIMATION)

    # Decimate timestamps.
    # Window of size DECIMATION is decimated to its last value.
    # Input = timestamps
    # Output = decimated_timestamps.
    decimated_timestamps = Stream('decimated_timestamps')
    map_window(lambda window: window[-1],
               timestamps,
               decimated_timestamps,
               window_size=DECIMATION,
               step_size=DECIMATION)

    # Demean (subtract mean) from decimated stream.
    # Subtract mean of window from the window's last value.
    # Move sliding window forward by 1 step.
    # Input = decimated
    # Output = demeaned
    demeaned = Stream('demeaned', initial_value=[0.0] * (LTA_count - 1))
    map_window(lambda window: window[-1] - sum(window) / float(len(window)),
               decimated,
               demeaned,
               window_size=LTA_count,
               step_size=1)

    # Add timestamps to demeaned accelerations.
    # Merges decimated_timestamps and demeaned to get timestamped_data.
    # Inputs = decimated_timestamps, demeaned
    # Outputs = timestamped_data
    timestamped_data = Stream('timestamped_data')
    zip_streams(in_streams=[decimated_timestamps, demeaned],
                out_stream=timestamped_data)

    # Detect picks.
    # Output a pick if the value part of the time_value (t_v) exceeds threshold.
    # Input = timestamped_data
    # Output = picks
    picks = Stream('picks')
    filter_element(lambda t_v: abs(t_v[1]) > PICK_THRESHOLD, timestamped_data,
                   picks)

    # Quench picks.
    # An element is a (timestamp, value).
    # Start a new quench when timestamp > QUENCH_PERIOD + last_quench.
    # Update the last quench when a new quench is initiated.
    # Initially the last_quench (i.e. state) is 0.
    # Input = picks
    # Output = quenched_picks
    quenched_picks = Stream('quenched_picks')

    # f is the filtering function
    def f(timestamped_value, last_quench, QUENCH_PERIOD):
        timestamp, value = timestamped_value
        new_quench = timestamp > QUENCH_PERIOD + last_quench
        last_quench = timestamp if new_quench else last_quench
        # return filter condition (new_quench) and next state (last_quench)
        return new_quench, last_quench

    filter_element(f, picks, quenched_picks, state=0, QUENCH_PERIOD=2)

    # Send quenched picks.
    send_event(quenched_picks, orientation='n')
    # ---------------------------------------------------------------

    # ---------------------------------------------------------------
    # Drive test
    print_stream(timestamps, 'timestamps')
    print_stream(scaled, 'scaled')
    print_stream(decimated, 'decimated')
    print_stream(decimated_timestamps, 'decimated_timestamps')
    print_stream(demeaned, 'demeaned')
    print_stream(timestamped_data, 'timestamped_data')
    print_stream(picks, 'picks')
    scaled.extend([1.0, 1.0, 2.0, 4.0, 4.0, 20.0, 8.0, 8.0, 4.0, 6.0])
    timestamps.extend(list(range(12)))
    run()