Example #1
0
 def compute_func(in_streams, out_streams):
     """
     Composed agent with two input streams and no output stream.

     Wires together two component agents: the first zips the input
     streams onto an internal stream, and the second sinks that
     internal stream to the file 'output.dat'.
     """
     from sink import stream_to_file

     # Internal stream: visible only inside this network.
     merged = Stream()

     # Agent 1: zip all inputs into a single stream of tuples.
     zip_stream(in_streams=in_streams, out_stream=merged)

     # Agent 2: persist the zipped tuples to disk.
     stream_to_file(in_stream=merged, filename='output.dat')
 def compute_3(in_streams, out_streams):
     """Zip the input streams and write the result to 'result_2.dat'."""
     # Internal stream carrying the zipped tuples.
     merged = Stream()
     zip_stream(in_streams, merged)
     stream_to_file(merged, 'result_2.dat')
Example #3
0
def f(in_streams, out_streams):
    """
    Compute Function for Sensor Reader.

    Builds a network of stream agents that scales raw accelerometer
    samples to g units, inverts the E channel, decimates by windowed
    averaging, attaches the matching timestamp, rate-limits ("quenches")
    the output, and unzips the result onto out_streams.

    Parameters
    ----------
    in_streams: list of input Streams - acceleration reordered to conform SAF standard of [N, E, Z]
        in_streams[0] - acceleration N
        in_streams[1] - acceleration E
        in_streams[2] - acceleration Z
        in_streams[3] - timestamp
    out_streams: list of Streams
        out_streams[0] - acceleration N (averaged and picked)
        out_streams[1] - acceleration E (averaged and picked)
        out_streams[2] - acceleration Z (averaged and picked)
        out_streams[3] - timestamp
    """
    # Number of acceleration channels; the last input stream is the timestamp.
    n_acc = len(in_streams) - 1

    # DECLARE STREAMS

    scaled_acc = [Stream('scaled_acc_' + str(i)) for i in range(n_acc)]
    inverted_acc = Stream('inverted_acc')  # stream for inverted acceleration E
    averaged_acc = [Stream('averaged_acc_' + str(i)) for i in range(n_acc)]
    acc_timestamp = Stream(
        'acc_timestamp')  # timestamp corresponding to the averaged_acc
    acc_merged = Stream(
        'acc_merged')  # acceleration stream merged with timestamp stream
    acc_picked = Stream(
        'acc_picked')  # stream of acc data picked according to timestamp

    # CREATE AGENTS

    # 1. SCALE ACCELERATION
    # our special order CSN Phidgets are scaled to +/- 2g instead of +/- 6g
    def scale_g(v):
        # Convert a raw sensor sample to units of g.
        return PHIDGETS_ACCELERATION_TO_G * v

    # One scaling agent per acceleration channel (N, E, Z).
    for i in range(n_acc):
        map_element(func=scale_g,
                    in_stream=in_streams[i],
                    out_stream=scaled_acc[i])

    # TODO: CHECK AND REPORT MISSING SAMPLES
    # if self.last_phidgets_timestamp:
    #     sample_increment = int(
    #         round((phidgets_timestamp - self.last_phidgets_timestamp) / PHIDGETS_NOMINAL_DATA_INTERVAL))
    #     if sample_increment > 4 * self.decimation:
    #         logging.warn('Missing >3 samples: last sample %s current sample %s missing samples %s', \
    #                      self.last_phidgets_timestamp, phidgets_timestamp, sample_increment)
    #     elif sample_increment == 0:
    #         logging.warn('Excess samples: last sample %s current sample %s equiv samples %s', \
    #                      self.last_phidgets_timestamp, phidgets_timestamp, sample_increment)

    # 2. INVERT ACCELERATION E
    # invert channel 1 (E-W) - this results in +1g being reported when the sensor is resting on its E side
    def invert_channel(v):
        # Flip the sign of a sample.
        return -1 * v

    map_element(func=invert_channel,
                in_stream=scaled_acc[1],
                out_stream=inverted_acc)

    # 3. AVERAGE WINDOW
    # Decimate each channel: one averaged sample per PHIDGETS_DECIMATION
    # raw samples (window_size == step_size, so windows do not overlap).
    def average_samples(window):
        # Arithmetic mean of the window (float() keeps Py2 division exact).
        return sum(window) / float(len(window))

    # average for inverted channel
    map_window(func=average_samples,
               in_stream=inverted_acc,
               out_stream=averaged_acc[1],
               window_size=PHIDGETS_DECIMATION,
               step_size=PHIDGETS_DECIMATION)

    # Channels 0 (N) and 2 (Z) are averaged from the scaled stream directly;
    # channel 1 (E) was averaged above from its inverted stream.
    for i in [0, 2]:
        map_window(func=average_samples,
                   in_stream=scaled_acc[i],
                   out_stream=averaged_acc[i],
                   window_size=PHIDGETS_DECIMATION,
                   step_size=PHIDGETS_DECIMATION)

    # 4. OBTAIN CORRESPONDING TIMESTAMP
    # Each averaged sample gets the timestamp of the last raw sample in
    # its window, so timestamps stay aligned with the decimated data.
    def get_timestamp(window):
        return window[-1]

    map_window(func=get_timestamp,
               in_stream=in_streams[3],
               out_stream=acc_timestamp,
               window_size=PHIDGETS_DECIMATION,
               step_size=PHIDGETS_DECIMATION)

    # 5. ZIP ACCELERATION AND TIMESTAMP STREAMS
    # acc_merged carries tuples of (N, E, Z, timestamp).
    zip_stream(in_streams=averaged_acc + [acc_timestamp],
               out_stream=acc_merged)

    # 6. QUENCH SENSOR READING
    # Stateful picker: state holds the timestamp of the last emitted tuple.
    # NOTE(review): this assumes filter_element drops an element when the
    # func returns True and passes it through on False — confirm against
    # the agent framework's filter_element semantics.
    def timestamp_picker(v, state):
        if v[3] - state > PICKER_INTERVAL:  # generate output
            return False, v[3]
        else:
            return True, state

    filter_element(func=timestamp_picker,
                   in_stream=acc_merged,
                   out_stream=acc_picked,
                   state=0)

    # 7. UNZIP STREAM - to pass streams to other processes
    unzip(in_stream=acc_picked, out_streams=out_streams)
Example #4
0
 def compute_func_3(in_streams, out_streams):
     """Zip the input streams and write the tuples to 'result.dat'."""
     # Internal stream holding the zipped tuples.
     merged = Stream()
     zip_stream(in_streams, out_stream=merged)
     stream_to_file(in_stream=merged, filename='result.dat')
Example #5
0
def g(in_streams, out_streams):
    """
    Compute Function for Picker.

    Builds a network of stream agents that removes each channel's
    long-term average, tags windows with their timestamp, zips the
    channels, keeps only samples exceeding a magnitude threshold,
    rate-limits re-picks, and writes the survivors to a file.

    Parameters
    ----------
    in_streams: list of input Streams passed from sensor reader process (f)
        in_streams[0] - acceleration N
        in_streams[1] - acceleration E
        in_streams[2] - acceleration Z
        in_streams[3] - timestamp
    out_streams: list of Streams
        Unused here; results are written to a file instead.
    """

    # DECLARE STREAMS

    adjusted_acc = [
        Stream('adjusted_acc_{}'.format(i))
        for i in range(len(in_streams) - 1)
    ]
    adjusted_timestamp = Stream('adjusted_timestamp')
    merged_acc = Stream('acc_merged')
    filtered_acc = Stream('filtered_acc')
    quenched_acc = Stream('quenched_acc')

    # DEFINE AGENTS

    # 1. ADJUST LTA - subtract long-term-average from sample data
    def adjust_lta(window):
        # Magnitude of the newest sample relative to the window mean.
        return abs(window[-1] - sum(window) / len(window))

    # One LTA-adjusting agent per acceleration channel; step_size=1 gives
    # a sliding window, so there is one output per input sample (after
    # the first LTA_COUNT samples fill the window).
    for i in range(len(in_streams) - 1):
        map_window(func=adjust_lta,
                   in_stream=in_streams[i],
                   out_stream=adjusted_acc[i],
                   window_size=LTA_COUNT,
                   step_size=1)

    # 2. ADJUST TIMESTAMP - obtain timestamp corresponding to each window
    # Uses the newest (last) timestamp in the window so timestamps stay
    # aligned with the adjusted acceleration values above.
    def adjust_timestamp(window):
        return window[-1]

    map_window(func=adjust_timestamp,
               in_stream=in_streams[-1],
               out_stream=adjusted_timestamp,
               window_size=LTA_COUNT,
               step_size=1)

    # 3. ZIP STREAM - zip acceleration and timestamp streams
    # merged_acc carries tuples of (N, E, Z, timestamp).
    zip_stream(in_streams=adjusted_acc + [adjusted_timestamp],
               out_stream=merged_acc)

    # 4. DETECT ANOMALY - filter out small magnitude to report only large acceleration
    # NOTE(review): assumes filter_element passes an element through when
    # func returns True (any channel above threshold) and drops it on
    # False — confirm against the framework's filter_element semantics.
    def detect_anomaly(v):
        # v[:-1] are the acceleration channels; v[-1] is the timestamp.
        return any(map(lambda x: x > PICKER_THRESHOLD, v[:-1]))

    filter_element(func=detect_anomaly,
                   in_stream=merged_acc,
                   out_stream=filtered_acc)

    # 5. QUENCH PICKER
    # Stateful rate limiter: state is the timestamp of the last emitted
    # pick; _no_value is the framework sentinel for "emit nothing".
    def quench_picker(v, state):
        timestamp = v[3]
        if timestamp - state < MINIMUM_REPICK_INTERVAL_SECONDS:
            return _no_value, state
        else:
            state = timestamp
            return v, state

    map_element(func=quench_picker,
                in_stream=filtered_acc,
                out_stream=quenched_acc,
                state=0)

    # 6. STREAM RESULTS TO FILE - for test purposes
    stream_to_file(quenched_acc, './phidget_data.txt')