Example #1
def test_1_single_process():
    """
    This is a single process example which is converted into a
    multicore example in test_1(), see below.

    The partitioning to obtain multiple cores and threads is
    done as follows.

    (1) put_data_in_stream() is converted to a function which
    is the target of a thread. In test_1() this function is
    source_thread_target(proc, stream_name)

    (2) double(x,y) is put in a separate process. The compute
    function of this process is f(). Since the parameters
    of compute_func are in_streams and out_streams, we get
    f from double in the following way:
    
    def f(in_streams, out_streams):
        double(in_stream=in_streams[0], out_stream=out_streams[0])
    

    (3) increment() and print_stream() are in a separate process.
    The compute function of this process is g().

    Run both test_1_single_process() and test_1() and look at
    their identical outputs.

    """

    # ********************************************************
    # We will put this function in its own thread in test_1()
    def put_data_in_stream(stream):
        num_steps = 5
        step_size = 4
        for i in range(num_steps):
            data = list(range(i*step_size, (i+1)*step_size))
            stream.extend(data)
            run()
        return

    # ********************************************************
    # We will put these lines in a separate process in test_1()
    x = Stream('x')
    y = Stream('y')
    double(x, y)

    # *********************************************************
    # We will put these lines in a separate process in test_1().
    s = Stream(name='s')
    increment(y, s)
    print_stream(s, name=s.name)

    # *********************************************************
    # This function is executed in a separate thread in test_1().
    put_data_in_stream(x)
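
The docstring above shows f, the compute function for the process that runs double(), but not g, the compute function for the process that runs increment() and print_stream(). As a minimal sketch (matching Example #6 below), the two compute functions for test_1() would look like this; the process and thread wiring of test_1() itself is not shown here:

def f(in_streams, out_streams):
    # Process 1: double the source stream.
    double(in_stream=in_streams[0], out_stream=out_streams[0])

def g(in_streams, out_streams):
    # Process 2: increment the doubled stream and print the result.
    s = Stream(name='s')
    increment(in_streams[0], s)
    print_stream(s, name=s.name)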
Example #2
def print_input_stream(in_streams, out_streams):
    print_stream(in_streams[0], in_streams[0].name)
Example #3
def m(in_streams, out_streams):
    s = Stream('s')
    sum_numbers(in_streams, s)
    print_stream(s, name='s')
Example #4
def sums(in_streams, out_streams):
    s = Stream('s')
    sum_window(in_streams[0], s, window_size=3, step_size=3)
    print_stream(s, name='           p2')
Example #5
def g(in_streams, out_streams):
    t = Stream('t')
    filter_then_square(in_streams[0], t,
                       filter_threshold=20)
    print_stream(t, name='p1')
Example #6
def g(in_streams, out_streams):
    s = Stream(name='s')
    increment(in_streams[0], s)
    print_stream(s, name=s.name)
Example #7
def compute_func(in_streams, out_streams):
    print_stream(multiply(in_streams[0], multiplicand=2))
Example #8
def send_event(stream, orientation):
    # Replace with Julian's send_event().
    print_stream(stream, name=stream.name + '_' + orientation)
Example #9
def test_pick_orientation_with_verbose_output():
    PHIDGETS_ACCELERATION_TO_G = 1.0 / 3.0
    DECIMATION = 2
    LTA_count = 2
    PICK_THRESHOLD = 0.5

    # ---------------------------------------------------------------
    # Input streams
    # scaled is the stream of scaled acceleration data along one axis.
    # timestamps is the stream of timestamps.
    scaled = Stream('scaled')
    timestamps = Stream('timestamps')

    # Decimate acceleration.
    # Window of size DECIMATION is decimated to its average.
    # Input = scaled
    # Output = decimated.
    decimated = Stream('decimated')
    map_window(lambda v: sum(v) / float(len(v)),
               scaled,
               decimated,
               window_size=DECIMATION,
               step_size=DECIMATION)

    # Decimate timestamps.
    # Window of size DECIMATION is decimated to its last value.
    # Input = timestamps
    # Output = decimated_timestamps.
    decimated_timestamps = Stream('decimated_timestamps')
    map_window(lambda window: window[-1],
               timestamps,
               decimated_timestamps,
               window_size=DECIMATION,
               step_size=DECIMATION)

    # Demean (subtract mean) from decimated stream.
    # Subtract mean of window from the window's last value.
    # Move sliding window forward by 1 step.
    # Input = decimated
    # Output = demeaned
    demeaned = Stream('demeaned', initial_value=[0.0] * (LTA_count - 1))
    map_window(lambda window: window[-1] - sum(window) / float(len(window)),
               decimated,
               demeaned,
               window_size=LTA_count,
               step_size=1)

    # Add timestamps to demeaned accelerations.
    # Merges decimated_timestamps and demeaned to get timestamped_data.
    # Inputs = decimated_timestamps, demeaned
    # Outputs = timestamped_data
    timestamped_data = Stream('timestamped_data')
    zip_streams(in_streams=[decimated_timestamps, demeaned],
                out_stream=timestamped_data)

    # Detect picks.
    # Output a pick if the absolute value part of the time_value pair (t_v) exceeds PICK_THRESHOLD.
    # Input = timestamped_data
    # Output = picks
    picks = Stream('picks')
    filter_element(lambda t_v: abs(t_v[1]) > PICK_THRESHOLD, timestamped_data,
                   picks)

    # Quench picks.
    # An element is a (timestamp, value).
    # Start a new quench when timestamp > QUENCH_PERIOD + last_quench.
    # Update the last quench when a new quench is initiated.
    # Initially the last_quench (i.e. state) is 0.
    # Input = picks
    # Output = quenched_picks
    quenched_picks = Stream('quenched_picks')

    # f is the filtering function
    def f(timestamped_value, last_quench, QUENCH_PERIOD):
        timestamp, value = timestamped_value
        new_quench = timestamp > QUENCH_PERIOD + last_quench
        last_quench = timestamp if new_quench else last_quench
        # return filter condition (new_quench) and next state (last_quench)
        return new_quench, last_quench

    filter_element(f, picks, quenched_picks, state=0, QUENCH_PERIOD=2)

    # Send quenched picks.
    send_event(quenched_picks, orientation='n')
    # ---------------------------------------------------------------

    # ---------------------------------------------------------------
    # Drive test
    print_stream(timestamps, 'timestamps')
    print_stream(scaled, 'scaled')
    print_stream(decimated, 'decimated')
    print_stream(decimated_timestamps, 'decimated_timestamps')
    print_stream(demeaned, 'demeaned')
    print_stream(timestamped_data, 'timestamped_data')
    print_stream(picks, 'picks')
    scaled.extend([1.0, 1.0, 2.0, 4.0, 4.0, 20.0, 8.0, 8.0, 4.0, 6.0])
    timestamps.extend(list(range(12)))
    run()
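
The stateful quench rule in f() above can be easier to follow outside the stream framework. The sketch below is a plain-Python restatement under the same assumptions as the code: a pick is a (timestamp, value) pair, the state last_quench starts at 0, and a pick passes only when its timestamp exceeds last_quench + QUENCH_PERIOD, which then resets last_quench.

def quench(picks, QUENCH_PERIOD=2):
    # Same rule as f(), written as an ordinary loop over a list of picks.
    last_quench = 0
    quenched = []
    for timestamp, value in picks:
        if timestamp > QUENCH_PERIOD + last_quench:
            quenched.append((timestamp, value))
            last_quench = timestamp
    return quenched

# For example, quench([(3, 1.0), (5, 4.5), (7, -2.0), (9, -1.5)])
# keeps (3, 1.0) and (7, -2.0) and suppresses the picks at timestamps 5 and 9.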
Example #10
def compute_func(in_streams, out_streams):
    print_stream(in_streams[0])
Example #11
def ksigma(
        in_stream, out_stream,
        long_window_size, short_window_size,
        threshold):
    # f (not shown in this snippet) is applied to each sliding window
    # of size long_window_size.
    map_window(f, in_stream, out_stream,
               long_window_size, step_size=1)

if __name__ == '__main__':
    import random

    # Create streams
    s = Stream('s')
    t = Stream('t')

    # Create agents
    ksigma(
        in_stream=s, out_stream=t,
        long_window_size=100, short_window_size=4,
        threshold=5)
    print_stream(t)

    # Drive network of agents with input data
    input_sequence = [random.random() for _ in range(100)]
    input_sequence.extend([10 + random.random() for _ in range(5)])
    input_sequence.extend([random.random() for _ in range(100)])
    s.extend(input_sequence)

    # Execute a step of the scheduler
    Stream.scheduler.step()
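
The definition of ksigma() is truncated above, so the window function f passed to map_window is not shown. As a hypothetical illustration only (not the omitted implementation), a k-sigma detector over a window of length long_window_size might compare the mean of the most recent short_window_size values against the mean and standard deviation of the whole window, and report a value only when the deviation exceeds threshold standard deviations:

import statistics

def f(window, short_window_size=4, threshold=5):
    # Hypothetical k-sigma check over one sliding window.
    # Returns the short-window mean when it deviates from the
    # long-window mean by more than threshold standard deviations,
    # and 0.0 otherwise.
    long_mean = statistics.mean(window)
    long_std = statistics.pstdev(window)
    short_mean = statistics.mean(window[-short_window_size:])
    if long_std > 0 and abs(short_mean - long_mean) > threshold * long_std:
        return short_mean
    return 0.0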