def example_1():
    source_list = range(10)

    def source(out_stream):
        return source_list_to_stream(source_list, out_stream)

    def compute_0(in_streams, out_streams):
        identity(in_streams[0], out_stream=out_streams[0])

    proc_0 = shared_memory_process(compute_func=compute_0,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[('in', source)],
                                   name='process_0')

    def compute_1(in_streams, out_streams):
        stream_to_file(in_stream=in_streams[0], filename='result_1.dat')

    proc_1 = shared_memory_process(compute_func=compute_1,
                                   in_stream_names=['in'],
                                   out_stream_names=[],
                                   connect_sources=[],
                                   name='process_1')

    mp = Multiprocess(processes=[proc_0, proc_1],
                      connections=[(proc_0, 'out', proc_1, 'in')])
    mp.run()
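
The helper identity used by compute_0 (and again in example_3 and example_2 below) is not defined in these listings. A minimal stand-in consistent with how it is called would simply copy its input stream to its output stream; the version in the examples package may differ.

def identity(in_stream, out_stream):
    # Hypothetical sketch: copy every element of in_stream to out_stream.
    map_element(func=lambda v: v, in_stream=in_stream,
                out_stream=out_stream)
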
Example No. 2
def map_element_example_1():
    # STEP 1: DEFINE SOURCES
    source_list = range(10)

    def data_stream(out_stream):
        return source_list_to_stream(in_list=source_list,
                                     out_stream=out_stream)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        def f(x):
            return x * 10

        check_list = map(f, source_list)
        t = Stream()
        map_element(func=f, in_stream=in_streams[0], out_stream=t)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(in_stream=t, filename='map_element_example_1.dat')

    # STEP 3: CREATE THE PROCESS
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['test_input'],
                                 out_stream_names=[],
                                 connect_sources=[('test_input', data_stream)],
                                 connect_actuators=[],
                                 name='proc')

    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
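
check_correctness_of_output is called in several of these examples but never defined here. A hypothetical stand-in, consistent with how it is used, is a sink that asserts each arriving element equals the element at the same position of check_list; the real helper may differ.

def check_correctness_of_output(in_stream, check_list):
    # Hypothetical stand-in: compare each element that arrives on
    # in_stream with the expected value at the same position.
    position = [0]   # mutable counter shared by successive calls
    def check(v):
        assert v == check_list[position[0]]
        position[0] += 1
    sink_element(func=check, in_stream=in_stream)
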
Example No. 3
def filter_element_example_1():
    # STEP 1: DEFINE SOURCES
    source_list = [1, 1, 3, 3, 5, 5, 7, 7, 9, 9]

    def source(out_stream):
        return source_list_to_stream(in_list=source_list,
                                     out_stream=out_stream)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        def less_than_n(v, n):
            return v <= n, n + 1

        check_list = [1, 3, 5, 7, 9]
        t = Stream()
        filter_element(func=less_than_n,
                       in_stream=in_streams[0],
                       out_stream=t,
                       state=0)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(in_stream=t, filename='filter_element_example_1.dat')

    # STEP 3: CREATE THE PROCESS
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')

    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
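
The check_list above implies that filter_element passes along exactly those elements for which the (boolean, new_state) function returns False, dropping the ones for which it returns True. The stateful logic can be traced in plain Python, independent of the framework:

def less_than_n(v, n):
    return v <= n, n + 1

def simulate_stateful_filter(func, in_list, state):
    # Keep elements for which func returns False, as check_list implies.
    kept = []
    for v in in_list:
        drop, state = func(v, state)
        if not drop:
            kept.append(v)
    return kept

# simulate_stateful_filter(less_than_n, [1, 1, 3, 3, 5, 5, 7, 7, 9, 9], 0)
# returns [1, 3, 5, 7, 9], which matches check_list.
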
Example No. 4
def map_element_example_2():
    # STEP 1: DEFINE SOURCES
    source_list = 'hello world'

    def source(out_stream):
        return source_list_to_stream(in_list=source_list,
                                     out_stream=out_stream)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        import string
        f = string.upper
        check_list = map(f, source_list)
        t = Stream()
        map_element(func=f, in_stream=in_streams[0], out_stream=t)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(in_stream=t, filename='map_element_example_2.dat')

    # STEP 3: CREATE THE PROCESS
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')

    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
Example No. 5
def example_4():
    def q2f(q):
        queue_to_file(q, filename='result_max.dat', timeout=1.0)

    def q2f2(q):
        queue_to_file(q, filename='result_min.dat', timeout=1.0)

    def source(s):
        return source_func_to_stream(func=random.random,
                                     out_stream=s,
                                     num_steps=10)

    proc_0 = shared_memory_process(compute_func=max_min,
                                   in_stream_names=['in_0_0', 'in_0_1'],
                                   out_stream_names=['out_0_0', 'out_0_1'],
                                   connect_sources=[('in_0_0', source),
                                                    ('in_0_1', source)],
                                   name='process_0')

    proc_1 = shared_memory_process(compute_func=max_min,
                                   in_stream_names=['in_1_0', 'in_1_1'],
                                   out_stream_names=['out_1_0', 'out_1_1'],
                                   connect_sources=[],
                                   connect_actuators=[('out_1_0', q2f),
                                                      ('out_1_1', q2f2)],
                                   name='process_1')

    # Specify the multiprocess application.
    vm = Multiprocess(processes=[proc_0, proc_1],
                      connections=[(proc_0, 'out_0_0', proc_1, 'in_1_0'),
                                   (proc_0, 'out_0_1', proc_1, 'in_1_1')])
    vm.run()
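
The compute function max_min is assumed here but not defined in this listing. A hypothetical sketch consistent with its use in example_4 and example_5 (two or more input streams in, element-wise maximum and minimum out):

def max_min(in_streams, out_streams):
    # Hypothetical sketch: out_streams[0] carries the element-wise
    # maximum of the input streams, out_streams[1] the minimum.
    zip_map(func=max, in_streams=in_streams, out_stream=out_streams[0])
    zip_map(func=min, in_streams=in_streams, out_stream=out_streams[1])
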
Example No. 6
def make_and_run_process(compute_func):
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
Example No. 7
def example_3():
    def q2f(q):
        queue_to_file(q, filename='result.dat', timeout=1.0)

    def source(s):
        return source_list_to_stream(range(10), s)

    def compute_0(in_streams, out_streams):
        identity(in_streams[0], out_stream=out_streams[0])

    proc_0 = shared_memory_process(compute_func=compute_0,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[('in', source)],
                                   name='process_0')

    # Specify the process, proc_1
    def compute_1(in_streams, out_streams):
        r_mul(in_streams[0], out_streams[0], arg=20)

    proc_1 = shared_memory_process(compute_func=compute_1,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='process_1')

    # Specify the process, proc_2
    def compute_2(in_streams, out_streams):
        r_mul(in_streams[0], out_streams[0], arg=1000)

    proc_2 = shared_memory_process(compute_func=compute_2,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='process_2')

    # Specify the process, proc_3
    def compute_3(in_streams, out_streams):
        total(in_streams, out_streams[0])

    proc_3 = shared_memory_process(compute_func=compute_3,
                                   in_stream_names=['in_1', 'in_2'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   connect_actuators=[('out', q2f)],
                                   name='process_3')

    # Specify the multiprocess application.
    vm = Multiprocess(processes=[proc_0, proc_1, proc_2, proc_3],
                      connections=[(proc_0, 'out', proc_1, 'in'),
                                   (proc_0, 'out', proc_2, 'in'),
                                   (proc_1, 'out', proc_3, 'in_1'),
                                   (proc_2, 'out', proc_3, 'in_2')])
    vm.run()
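
r_mul and total are used above without being defined in this listing. Hypothetical sketches consistent with the way example_3 calls them:

def r_mul(in_stream, out_stream, arg):
    # Hypothetical sketch: multiply every element of in_stream by arg.
    map_element(func=lambda v: v * arg,
                in_stream=in_stream, out_stream=out_stream)

def total(in_streams, out_stream):
    # Hypothetical sketch: element-wise sum of the input streams.
    zip_map(func=sum, in_streams=in_streams, out_stream=out_stream)
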
def example_2():
    def source(out_stream):
        return source_list_to_stream(range(10), out_stream)

    def compute_0(in_streams, out_streams):
        identity(in_streams[0], out_stream=out_streams[0])

    proc_0 = shared_memory_process(compute_func=compute_0,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[('in', source)],
                                   name='process_0')

    # Specify the process, proc_1
    def compute_1(in_streams, out_streams):
        multiply(in_streams[0], out_streams[0], operand=10)

    proc_1 = shared_memory_process(compute_func=compute_1,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='process_1')

    # Specify the process, proc_2
    def compute_2(in_streams, out_streams):
        multiply(in_streams[0], out_streams[0], operand=1000)

    proc_2 = shared_memory_process(compute_func=compute_2,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='process_2')

    # Specify the process, proc_3
    def compute_3(in_streams, out_streams):
        t = Stream()
        zip_stream(in_streams, t)
        stream_to_file(t, 'result_2.dat')

    proc_3 = shared_memory_process(compute_func=compute_3,
                                   in_stream_names=['in_1', 'in_2'],
                                   out_stream_names=[],
                                   connect_sources=[],
                                   name='process_3')

    # Specify the multiprocess application.
    vm = Multiprocess(processes=[proc_0, proc_1, proc_2, proc_3],
                      connections=[(proc_0, 'out', proc_1, 'in'),
                                   (proc_0, 'out', proc_2, 'in'),
                                   (proc_1, 'out', proc_3, 'in_1'),
                                   (proc_2, 'out', proc_3, 'in_2')])
    vm.run()
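
multiply is also undefined in this listing; a hypothetical sketch consistent with its use in example_2:

def multiply(in_stream, out_stream, operand):
    # Hypothetical sketch: scale every element of in_stream by operand.
    map_element(func=lambda v: v * operand,
                in_stream=in_stream, out_stream=out_stream)
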
Example No. 9
def two_echo_reverberation(
        original_sound_list, delays_and_attenuations_dict,
        output_file_name):
    """
    Parameters
    ----------
       original_sound_list: list of numbers
          The original sound as a list of float or int
       delays_and_attenuations_dict: dict
          key: name of an echo
          value: pair (delay, attenuation_vector)
             where delay: int
                     delay in number of sample points.
                   attenuation_vector: list of floats
                     attenuation due to dispersion.
                     Often this vector consists of a
                     single element.
        output_file_name: str
           The name of the file on which the heard
           sound is stored.
        

    """
    echo_names = delays_and_attenuations_dict.keys()
    aggregator = make_aggregator(
        original_sound_list, output_file_name, echo_names)
    aggregator_process = aggregator.make_process()
    # processes will be a list consisting of the aggregator
    # process and all the echo processes.
    processes = [aggregator_process]
    connections = []
    for echo_name, delay_and_attenuation in delays_and_attenuations_dict.items():
        delay, attenuation_vector = delay_and_attenuation
        echo = make_echo(
            delay, attenuation_vector, echo_name)
        echo_process = echo.make_process()
        processes.append(echo_process)
        # Create a connection from the aggregator's output,
        # called 'original_sound_copy', to the echo process'
        # input, called 'original_sound'.
        connections.append(
            (aggregator_process, 'original_sound_copy',
             echo_process, 'original_sound'))
        # Create a connection from the echo process' output,
        # called echo_name, to the aggregator's input also
        # called echo_name.
        connections.append(
            (echo_process, echo_name,
             aggregator_process, echo_name))

    mp = Multiprocess(processes, connections)
    mp.run()
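
A hypothetical call of two_echo_reverberation, assuming make_aggregator and make_echo are available: an impulse followed by silence, with one echo arriving after 10 sample points and another after 30.

two_echo_reverberation(
    original_sound_list=[1.0] + [0.0] * 99,
    delays_and_attenuations_dict={'echo_0': (10, [0.6]),
                                  'echo_1': (30, [0.3])},
    output_file_name='heard_sound.dat')
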
Example No. 10
def simple_reverberation(original_sound_list, attenuation_vector, delay):
    # Create sources
    # Create agent that generates the original sound.
    # This agent runs in its own thread.
    def generate_original_sound(original_sound):
        return source_list_to_stream(
            in_list=original_sound_list,
            out_stream=original_sound,
            time_interval=0)

    # Create actuators
    # This example has no actuators

    # Define the computational function.
    def compute_func(in_streams, out_streams):
        # Name external streams for convenience
        original_sound = in_streams[0]
        heard_sound = out_streams[0]
        # Define internal streams
        echo = Stream(name='echo', initial_value=[0]*delay)
        
        # Create agents
        # Agent that creates heard sound from original sound and echo
        zip_map(func=sum,
                in_streams=[original_sound, echo],
                out_stream=heard_sound)
        # Agent that creates the echo from the heard sound.
        window_dot_product(
            in_stream=heard_sound, out_stream=echo,
            multiplicand_vector=attenuation_vector)
        # Agents that store sounds in files
        stream_to_file(in_stream=heard_sound, filename='heard.txt')
        stream_to_file(in_stream=echo, filename='echo.txt')
        stream_to_file(in_stream=original_sound, filename='original_sound.txt')

    # Create processes
    proc = shared_memory_process(
        compute_func=compute_func,
        in_stream_names=['original_sound'],
        out_stream_names=['heard_sound'],
        connect_sources=[('original_sound', generate_original_sound)],
        connect_actuators=[],
        name='proc')

    mp = Multiprocess(
        processes=[proc],
        connections=[])
    mp.run()
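
A hypothetical call of simple_reverberation: an impulse followed by silence, a single attenuation coefficient, and a delay of 4 sample points, so heard.txt should contain a decaying train of echoes.

simple_reverberation(original_sound_list=[1.0] + [0.0] * 63,
                     attenuation_vector=[0.5],
                     delay=4)
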
Example No. 11
def test_exponential_smoothing(source_list, smoothing_factor):
    def source(out_stream):
        return source_list_to_stream(source_list, out_stream)

    def compute_func(in_streams, out_streams):
        stream_for_filing = Stream()
        exponential_smoothing(in_streams[0], stream_for_filing,
                              smoothing_factor)
        # Store output
        stream_to_file(stream_for_filing, 'exponential_smoothing.txt')

    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
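
exponential_smoothing is not defined in this listing. A hypothetical sketch of it as a stateful map_element agent implementing y[n] = a*x[n] + (1-a)*y[n-1], assuming map_element accepts a state argument in the same way filter_element does above:

def exponential_smoothing(in_stream, out_stream, smoothing_factor):
    # Hypothetical sketch: the first output equals the first input;
    # thereafter y = a*x + (1-a)*previous_y.
    def smooth(x, state):
        if state == 'start':
            y = float(x)
        else:
            y = smoothing_factor * x + (1.0 - smoothing_factor) * state
        return y, y
    map_element(func=smooth, in_stream=in_stream,
                out_stream=out_stream, state='start')
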
Example No. 12
def example_5():
    def q2f(q):
        queue_to_file(q, filename='result_max.dat', timeout=1.0)

    def q2f2(q):
        queue_to_file(q, filename='result_min.dat', timeout=1.0)

    def source(s):
        return source_func_to_stream(func=random.random,
                                     out_stream=s,
                                     num_steps=10)

    num_procs = 4
    num_in = 5
    procs = []
    in_stream_names = ['in_' + str(j) for j in range(num_in)]
    out_stream_names = ['max_values', 'min_values']
    for i in range(num_procs):
        proc = shared_memory_process(max_min,
                                     in_stream_names,
                                     out_stream_names,
                                     connect_sources=[
                                         (in_stream_name, source)
                                         for in_stream_name in in_stream_names
                                     ],
                                     name='process' + str(i))
        procs.append(proc)

    aggregator = shared_memory_process(compute_func=max_min,
                                       in_stream_names=['max_val', 'min_val'],
                                       out_stream_names=['max_max', 'min_min'],
                                       connect_sources=[],
                                       connect_actuators=[('max_max', q2f),
                                                          ('min_min', q2f2)],
                                       name='aggregator')

    # Specify the multiprocess application.
    vm = Multiprocess(processes=procs + [aggregator],
                      connections=[(proc, 'max_values', aggregator, 'max_val')
                                   for proc in procs] +
                      [(proc, 'min_values', aggregator, 'min_val')
                       for proc in procs])
    vm.run()
Example No. 13
def map_element_example_3():
    # STEP 1: DEFINE SOURCES
    source_list = range(10)

    def source(out_stream):
        return source_list_to_stream(in_list=source_list,
                                     out_stream=out_stream)

    # Class used in map_element
    class example_class(object):
        def __init__(self, multiplicand):
            self.multiplicand = multiplicand
            self.running_sum = 0

        def step(self, v):
            result = v * self.multiplicand + self.running_sum
            self.running_sum += v
            return result

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        eg = example_class(multiplicand=2)
        check_list = [0, 2, 5, 9, 14, 20, 27, 35, 44, 54]
        t = Stream()
        map_element(func=eg.step, in_stream=in_streams[0], out_stream=t)
        check_correctness_of_output(in_stream=t, check_list=check_list)
        stream_to_file(in_stream=t, filename='map_element_example_3.dat')

    # STEP 3: CREATE THE PROCESS
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')

    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
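
The check_list above can be reproduced by stepping the same recurrence in plain Python: each output is v * multiplicand plus the running sum of all earlier inputs.

multiplicand, running_sum, outputs = 2, 0, []
for v in range(10):
    outputs.append(v * multiplicand + running_sum)
    running_sum += v
print(outputs)   # [0, 2, 5, 9, 14, 20, 27, 35, 44, 54]
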
Example No. 14
def twitter_analysis(consumer_key, consumer_secret, access_token,
                     access_token_secret, trackwords, tweet_analyzer,
                     num_tweets):
    # SOURCE
    def source(out_stream):
        return twitter_to_stream(consumer_key, consumer_secret, access_token,
                                 access_token_secret, trackwords, out_stream,
                                 num_tweets)

    # COMPUTATIONAL FUNCTION
    def compute_func(in_streams, out_streams):
        sink_element(func=tweet_analyzer, in_stream=in_streams[0])

    # PROCESSES
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc')
    # CREATE AND RUN MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
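
tweet_analyzer is any callable applied to each tweet by sink_element. A hypothetical analyzer, assuming tweets arrive as dicts decoded from the Twitter streaming API, might just print the text field:

def print_tweet_text(tweet):
    # Hypothetical tweet_analyzer: print the tweet's text, if present.
    if isinstance(tweet, dict) and 'text' in tweet:
        print(tweet['text'])
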
Example No. 15
def simple_reverberation(original_sound_list, delay, attenuation):
    # Create sources
    def generate_spoken_sound(spoken_sound):
        return source_list_to_stream(in_list=original_sound_list,
                                     out_stream=spoken_sound,
                                     time_interval=0)

    # Define the computational function.
    def compute_func(in_streams, out_streams):
        out_streams[0] = make_echo(in_streams[0], delay, attenuation)
        # Agents that store sounds in files used for testing.
        stream_to_file(in_stream=out_streams[0], filename='heard.txt')
        stream_to_file(in_stream=in_streams[0], filename='spoken.txt')

    # Create processes
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[('in', generate_spoken_sound)
                                                  ],
                                 name='proc')

    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
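
make_echo is not defined in this listing. A hypothetical sketch, modeled on the compute_func of the reverberation example above (Example No. 10), with a single scalar attenuation instead of an attenuation vector:

def make_echo(spoken_sound, delay, attenuation):
    # Hypothetical sketch: the echo starts with `delay` zeros; the
    # heard sound is spoken sound plus echo, and the echo is the
    # heard sound scaled by attenuation.
    echo = Stream(name='echo', initial_value=[0] * delay)
    heard_sound = Stream(name='heard_sound')
    zip_map(func=sum, in_streams=[spoken_sound, echo],
            out_stream=heard_sound)
    map_element(func=lambda v: v * attenuation,
                in_stream=heard_sound, out_stream=echo)
    return heard_sound
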
Example No. 16
def clock_offset_estimation_multiprocess():
    """
    An example of a multiprocess app. This example has three
    processes: proc_0 and proc_1 get time offsets from an ntp server,
    and put them on output streams. proc_2 gets these two streams as
    input, merges them and puts the resulting stream on a file called
    'offsets.dat'.

    """
    # ----------------------------------------------------------------
    #    DEFINE EACH OF THE PROCESSES
    # ----------------------------------------------------------------
    # The steps for creating a process are:
    # STEP 1: Define the sources: source()
    # STEP 2: Define the computational network: compute()
    # STEP 3: Call distributed_process()
    # Carry out the above three steps for each process
    # STEP 4: The last step is to specify the connections between
    # processes, and then make and run the multiprocess app by
    # creating a Multiprocess object and calling its run() method.

    # Constants
    ntp_server_0 = '0.us.pool.ntp.org'
    ntp_server_1 = '1.us.pool.ntp.org'
    time_interval = 0.1
    num_steps = 20

    # ----------------------------------------------------------------
    # MAKE PROCESS proc_0
    # proc_0 has no input streams and has a single output
    # stream which is called 's'.
    # It has a single source: see source_0.
    # ----------------------------------------------------------------

    # STEP 1: DEFINE SOURCES
    def source_0(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_0, time_interval,
                                       num_steps)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    # This network is a single pass-through agent; it merely copies
    # its in_stream to its out_stream.
    def compute(in_streams, out_streams):
        map_element(func=lambda x: x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 3: CREATE THE PROCESS
    # This process has a single source, no input stream, and an output
    # stream called 's'
    proc_0 = distributed_process(compute_func=compute,
                                 in_stream_names=['in'],
                                 out_stream_names=['s'],
                                 connect_sources=[('in', source_0)],
                                 name='process_0')

    # ----------------------------------------------------------------
    # MAKE PROCESS proc_1
    # proc_1 has no input streams and has a single output
    # stream which is called 's'.
    # It has a single source: see source_1.
    # ----------------------------------------------------------------

    # STEP 1: DEFINE SOURCES
    def source_1(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_1, time_interval,
                                       num_steps)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    # This network is a single pass-through agent; it merely copies
    # its in_stream to its out_stream.
    def compute(in_streams, out_streams):
        map_element(func=lambda x: x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 3: CREATE THE PROCESS
    # This process has a single source, no input stream, and an output
    # stream called 's'
    proc_1 = distributed_process(compute_func=compute,
                                 in_stream_names=['in'],
                                 out_stream_names=['s'],
                                 connect_sources=[('in', source_1)],
                                 name='process_1')

    # ----------------------------------------------------------------
    # MAKE PROCESS proc_2
    # proc_2 has two input streams and no output stream.
    # It has no sources.
    # ----------------------------------------------------------------

    # STEP 1: DEFINE SOURCES
    # This process has no sources.

    # STEP 2: DEFINE COMPUTE_FUNC
    # The composed agent consists of two component agents:
    # (1) blend: an agent which blends (merges) in_streams and outputs
    #     merged_stream, and
    # (2) stream_to_file: a sink agent which inputs merged_stream and
    #     prints it.
    def compute(in_streams, out_streams):
        merged_stream = Stream('merge of two ntp server offsets')
        blend(func=identity, in_streams=in_streams, out_stream=merged_stream)
        stream_to_file(in_stream=merged_stream, filename='offsets.dat')

    # STEP 3: CREATE THE PROCESS
    # This process has no sources, two input streams, and no output
    # streams. We call the input streams 'u' and 'v'.
    proc_2 = distributed_process(compute_func=compute,
                                 in_stream_names=['u', 'v'],
                                 out_stream_names=[],
                                 connect_sources=[],
                                 name='process_2')

    # ----------------------------------------------------------------
    # FINAL STEP: RUN APPLICATION
    # Specify connections: A list of 4-tuples:
    # (process, output stream name, process, input stream name)
    # ----------------------------------------------------------------
    vm = Multiprocess(processes=[proc_0, proc_1, proc_2],
                      connections=[(proc_0, 's', proc_2, 'u'),
                                   (proc_1, 's', proc_2, 'v')])
    vm.run()
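
offsets_from_ntp_server is assumed by this example but not defined here. A hypothetical sketch using the third-party ntplib package, querying the server once per step and putting the reported clock offset (in seconds) on the output stream:

def offsets_from_ntp_server(out_stream, ntp_server, time_interval,
                            num_steps):
    # Hypothetical sketch: one NTP query per step; put None on the
    # stream if a query fails.
    import ntplib
    client = ntplib.NTPClient()
    def get_offset():
        try:
            return client.request(ntp_server, version=3).offset
        except Exception:
            return None
    return source_func_to_stream(func=get_offset,
                                 out_stream=out_stream,
                                 time_interval=time_interval,
                                 num_steps=num_steps)
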
Example No. 17
def single_process_single_source_example_1():
    """
    The application in this example consists of a single process.
    The process has a single source and no actuator.
    The single source generates 1, 2, 3, 4, .....
    The compute function multiplies this sequence by 10
    and puts the result in the file called test.dat
    num_steps is the number of values output by the source.
    For example, if num_steps is 4 and test.dat is empty before the
    function is called then, test.dat will contain 10, 20, 30, 40
    on separate lines.

    The steps for creating a process are:
    (1) Define the sources.
        In this example we have a single source, called source.
    (2) Define the actuators.
        In this example we have no actuators.
    (3) Define compute_func
    (4) Create the process by calling distributed_process()

    Final step
    After creating all processes, specify the connections between
    processes and run the application by creating a Multiprocess and
    calling its run() method.

    """

    # STEP 1: DEFINE SOURCES
    def source(out_stream):
        """
        A simple source which outputs 1, 2, 3,... on
        out_stream.
        """
        def generate_sequence(state):
            return state + 1, state + 1

        # Return an agent which takes 4 steps, and
        # sleeps for 0.1 seconds between successive steps, and
        # puts the next element of the sequence in stream s,
        # and starts the sequence with value 0. The elements on
        # out_stream will be 1, 2, 3, ...
        return source_func_to_stream(func=generate_sequence,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=4,
                                     state=0)

    # STEP 2: DEFINE ACTUATORS
    # This example has no actuators

    # STEP 3: DEFINE COMPUTE_FUNC
    def compute_func(in_streams, out_streams):
        # This is a simple example of a composed agent consisting
        # of two component agents where the network has a single input
        # stream and no output stream.
        # The first component agent applies function f to each element
        # of in_stream, and puts the result in its output stream t.
        # The second component agent puts values in its input stream t
        # on a file called test.dat.
        # test.dat will contain 10, 20, 30, ....

        def f(x):
            return x * 10

        t = Stream()
        map_element(func=f, in_stream=in_streams[0], out_stream=t)
        stream_to_file(in_stream=t, filename='test.dat')

    # STEP 4: CREATE PROCESSES
    # This process has a single input stream that we call 'in' and it
    # has no output streams. We connect the source to the input stream
    # called 'in'.
    proc = distributed_process(compute_func=compute_func,
                               in_stream_names=['in'],
                               out_stream_names=[],
                               connect_sources=[('in', source)],
                               connect_actuators=[],
                               name='proc')

    # FINAL STEP: RUN APPLICATION
    # Since this application has a single process it has no
    # connections between processes.
    vm = Multiprocess(processes=[proc], connections=[])
    vm.run()
Example No. 18
def multiprocess_example_1():
    """
    A simple example of a multiprocess application with two processes,
    proc_0 and proc_1. 
    proc_0 has a source, no input streams and a single output stream
    called 's'. 
    proc_1 has no sources, a single input stream called 't', and no
    output streams.
    The connection between the processes is as follows:
       the output stream called 's' from proc_0 is the input stream
       called 't' in proc_1.
    The source in proc_0 generates 1, 2, 3, 4,.... and the
    agent in proc_0 multiplies these values by 10, and
    so proc_0 outputs 10, 20, 30, 40, ... on its output stream.
    proc_1 reads the output stream of proc_0, and its agent
    multiplies the elements in this stream by 200 and puts the
    values in a file called 'result.dat' which will contain:
    2000, 4000, 6000, ...

    The steps for creating a process are:
    (1) Define the sources.
        In this example proc_0 has a single source, source_0, and
        proc_1 has no sources.
    (2) Define the actuators.
        In this example we have no actuators.
    (3) Define compute_func
    (4) Create the process by calling distributed_process()

    Final step
    After creating all processes, specify the connections between
    processes and run the application by creating a Multiprocess and
    calling its run() method.

    """

    # A helper function
    def increment_state(state):
        return state + 1, state + 1

    # ----------------------------------------------------------------
    #    DEFINE EACH OF THE PROCESSES
    # ----------------------------------------------------------------

    # ----------------------------------------------------------------
    # MAKE PROCESS proc_0
    # proc_0 has no input streams and has a single output
    # stream which is called 't'.
    # It has a single source: see source_0.
    # ----------------------------------------------------------------
    # STEP 1: DEFINE SOURCES
    def source_0(out_stream):
        return source_func_to_stream(func=increment_state,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=10,
                                     state=0,
                                     window_size=1,
                                     name='source')

    # STEP 2: DEFINE ACTUATORS
    # This process has no actuators

    # STEP 3: DEFINE COMPUTE_FUNC
    # The agent for this process is a single map_element agent.
    # The map element agent has a single input stream: in_streams[0],
    # and it has a single output stream: out_streams[0]. The elements
    # of the output stream are 10 times the elements of the input
    # stream.
    def compute_0(in_streams, out_streams):
        map_element(func=lambda x: 10 * x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 4: CREATE PROCESSES
    # This process has no input streams and has a single output stream
    # which is the stream produced by the compute_0() network of
    # agents, and this output stream is called 's'. It has a single
    # source agent: source_0().
    proc_0 = distributed_process(compute_func=compute_0,
                                 in_stream_names=['in'],
                                 out_stream_names=['s'],
                                 connect_sources=[('in', source_0)],
                                 name='process_0')

    # ----------------------------------------------------------------
    # MAKE PROCESS proc_1
    # proc_1 has one input stream, called 't' and has no output
    # streams
    # It has no sources.
    # ----------------------------------------------------------------

    # STEP 1: DEFINE SOURCES
    # This process has no sources; so skip this step.

    # STEP 2: DEFINE ACTUATORS
    # This process has no actuators

    # STEP 3: DEFINE COMPUTE_FUNC
    # This network consists of a map_element agent and
    # a stream_to_file agent, a type of sink agent, which
    # puts the elements of result_stream in a file called 'result.dat'.
    # result_stream is internal to the network.
    def compute_1(in_streams, out_streams):
        result_stream = Stream('result of computation')
        map_element(func=lambda x: 200 * x,
                    in_stream=in_streams[0],
                    out_stream=result_stream)
        stream_to_file(in_stream=result_stream, filename='result.dat')

    # STEP 4: CREATE PROCESSES
    # This process has a single input stream, called 't', produced by
    # proc_0. It has no output streams.
    proc_1 = distributed_process(compute_func=compute_1,
                                 in_stream_names=['t'],
                                 out_stream_names=[],
                                 connect_sources=[],
                                 name='process_1')

    # ----------------------------------------------------------------
    # FINAL STEP: RUN APPLICATION
    # Specify connections: A list of 4-tuples:
    # (process, output stream name, process, input stream name)
    # ----------------------------------------------------------------
    vm = Multiprocess(processes=[proc_0, proc_1],
                      connections=[(proc_0, 's', proc_1, 't')])
    vm.run()
Example No. 19
def clock_offset_estimation_single_process_multiple_sources():
    """
    Another test of a single process with multiple sources and no
    actuators. 
    This process merges offsets received from two ntp sources and
    computes their average over a moving time window, and puts the
    result on a file, average.dat
    This process has two sources, each of which receives ntp offsets
    from ntp servers. The composed agent consists of three component
    agents: 
    (1) a component agent that merges the two sources, and
    (2) a component agent that computes the average of the merged
    stream over a window, and
    (3) a component sink agent that puts the averaged stream in a file
    called 'average.dat'.

    The steps for creating a process are:
    (1) Define the sources.
        In this example we have two sources, source_0 and source_1
    (2) Define the actuators.
        In this example we have no actuators.
    (3) Define compute_func
    (4) Create the process by calling distributed_process()

    Final steps: After creating all processes, specify the connections
    between processes and then run the application by creating a
    Multiprocess and calling its run() method.

    """
    ntp_server_0 = '0.us.pool.ntp.org'
    ntp_server_1 = '1.us.pool.ntp.org'
    time_interval = 0.1
    num_steps = 20

    def average_of_list(a_list):
        if a_list:
            # Remove None elements from the list
            a_list = [i for i in a_list if i is not None]
            # Handle the non-empty list.
            if a_list:
                return sum(a_list) / float(len(a_list))
        # Handle the empty list
        return 0.0

    # STEP 1: DEFINE SOURCES
    def source_0(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_0, time_interval,
                                       num_steps)

    def source_1(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_1, time_interval,
                                       num_steps)

    # STEP 2: DEFINE ACTUATORS
    # This process has no actuators

    # STEP 3: DEFINE COMPUTE_FUNC
    # This composed agent has two input streams, one from each
    # source.
    # It has two internal streams: merged_stream and averaged_stream.
    # It has 3 component agents:
    # (1) blend: The composed agent's two input streams feed a blend
    # agent which outputs merged_stream.
    # (2) The map_window agent reads merged_stream and outputs
    # averaged_stream.
    # (3) The stream_to_file agent inputs averaged_stream. This agent
    # is a sink which puts the stream into the file called
    # 'average.dat'. The file will contain floating point numbers that
    # are the averages of the specified sliding window.
    def compute_func(in_streams, out_streams):
        merged_stream = Stream('merge of two ntp server offsets')
        averaged_stream = Stream('sliding window average of offsets')
        blend(func=lambda x: x,
              in_streams=in_streams,
              out_stream=merged_stream)
        map_window(func=average_of_list,
                   in_stream=merged_stream,
                   out_stream=averaged_stream,
                   window_size=2,
                   step_size=1)
        stream_to_file(in_stream=averaged_stream, filename='average.dat')

    # STEP 4: CREATE PROCESSES
    proc = distributed_process(compute_func=compute_func,
                               in_stream_names=['ntp_0', 'ntp_1'],
                               out_stream_names=[],
                               connect_sources=[('ntp_0', source_0),
                                                ('ntp_1', source_1)],
                               connect_actuators=[],
                               name='proc')

    # FINAL STEP: RUN APPLICATION
    vm = Multiprocess(processes=[proc], connections=[])
    vm.run()
Example No. 20
def simple_actuator_example():
    """
    This example has a single source which generates the sequence:
    1, 2, 3, .... It has a single actuator which gets messages from a
    queue and prints the message.
    
    The steps for creating a process are:
    (1) Define the sources.
        In this example we have a single source,
        sequence_of_integers_source.
    (2) Define the actuators.
        In this example we have a single actuator, print_from_queue.
    (3) Define compute_func
    (4) Create the process by calling distributed_process()

    Final step
    After creating all processes, specify the connections between
    processes and run the application by creating a Multiprocess and
    calling its run() method.

    """

    # STEP 1: DEFINE SOURCES
    def sequence_of_integers(current_integer, max_integer):
        next_integer = current_integer + 1
        if next_integer > max_integer:
            # return next output, next state
            return None, next_integer
        else:
            return next_integer, next_integer

    def sequence_of_integers_source(out_stream):
        return source_func_to_stream(func=sequence_of_integers,
                                     out_stream=out_stream,
                                     num_steps=15,
                                     window_size=1,
                                     state=0,
                                     max_integer=10)

    # STEP 2: DEFINE ACTUATORS
    def print_from_queue(q):
        while True:
            v = q.get()
            if v is None:
                # exit loop
                return True
            else:
                print 'next value in queue is ', v

    # STEP 3: DEFINE COMPUTE_FUNC
    def f(in_streams, out_streams):
        def identity(v):
            return v

        map_element(func=identity,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 4: CREATE PROCESSES
    proc = distributed_process(compute_func=f,
                               in_stream_names=['in'],
                               out_stream_names=['out'],
                               connect_sources=[('in',
                                                 sequence_of_integers_source)],
                               connect_actuators=[['out', print_from_queue]])

    # FINAL STEP: RUN APPLICATION
    # Since this application has a single process it has no
    # connections between processes.
    vm = Multiprocess(processes=[proc], connections=[])
    vm.run()
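
queue_to_file, the actuator used in example_3, example_4, and example_5 above, follows the same queue protocol as print_from_queue. A hypothetical sketch: drain the queue into a file, one value per line, and stop on a None message or after timeout seconds with no message.

try:
    import queue as Queue   # Python 3
except ImportError:
    import Queue             # Python 2

def queue_to_file(q, filename, timeout):
    # Hypothetical sketch of the actuator assumed by earlier examples.
    with open(filename, 'w') as f:
        while True:
            try:
                v = q.get(timeout=timeout)
            except Queue.Empty:
                return
            if v is None:
                return
            f.write(str(v) + '\n')
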
Example No. 21
def single_process_multiple_sources_example_1():
    """
    The application in this example consists of a single process.
    This process has two sources: source_0 generates 1, 2, 3, 4, ...
    and source_1 generates random numbers. The agent zips the
    two streams together and writes the result to a file called
    output.dat. This file will have
           (1, r1), (2, r2), (3, r3), ...
    where r1, r2,.... are random numbers.

    The steps for creating a process are:
    (1) Define the sources.
        In this example we have two sources, source_0 and source_1
    (2) Define the actuators.
        In this example we have no actuators.
    (3) Define compute_func
    (4) Create the process by calling distributed_process()

    Final step
    After creating all processes, specify the connections between
    processes and run the application by creating a Multiprocess and
    calling its run() method.

    """
    import random

    # STEP 1: DEFINE SOURCES
    def source_0(out_stream):
        # A simple source which outputs 1, 2, 3, 4, .... on
        # out_stream.
        def generate_sequence(state):
            return state + 1, state + 1

        # Return a source which takes 10 steps, and
        # sleeps for 0.1 seconds between successive steps, and
        # puts the next element of the sequence in out_stream,
        # and starts the sequence with value 0. The elements on
        # out_stream will be 1, 2, 3, ...
        return source_func_to_stream(func=generate_sequence,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=10,
                                     state=0)

    def source_1(out_stream):
        # A simple source which outputs random numbers on
        # out_stream.

        # Return a source which takes 10 steps, and sleeps for 0.1
        # seconds between successive steps, and puts a random number
        # on out_stream at each step.
        return source_func_to_stream(func=random.random,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=10)

    # STEP 2: DEFINE ACTUATORS
    # This example has no actuators

    # STEP 3: DEFINE COMPUTE_FUNC
    def compute_func(in_streams, out_streams):
        # This is a simple example of a composed agent consisting
        # of two component agents where the composed agent has two
        # input streams and no output stream.
        # The first component agent zips the two input streams and puts
        # the result on its output stream t which is internal to the
        # network.
        # The second component agent puts values in its input stream t
        # on a file called output.dat.
        from sink import stream_to_file
        # t is an internal stream of the network
        t = Stream()
        zip_stream(in_streams=in_streams, out_stream=t)
        stream_to_file(in_stream=t, filename='output.dat')

    # STEP 4: CREATE PROCESSES
    proc = distributed_process(compute_func=compute_func,
                               in_stream_names=['source_0', 'source_1'],
                               out_stream_names=[],
                               connect_sources=[('source_0', source_0),
                                                ('source_1', source_1)],
                               connect_actuators=[],
                               name='multiple source test')

    # FINAL STEP: RUN APPLICATION
    # Since this application has a single process it has no
    # connections between processes.
    vm = Multiprocess(processes=[proc], connections=[])
    vm.run()
Example No. 22
def single_process_multiple_sources_example_1():
    """
    This example has two sources: sequence_of_integers and random_numbers.
    sequence_of_integers generates 1, 2, 3, 4, ... and random_numbers
    generates random numbers. The computation zips the two
    streams together and writes the result to a file called
    output.dat.
    
    num_steps is the number of values produced by the source. For
    example, if the smaller of the num_steps for each source is 10,
    then (1, r1), (2, r2), ..., (10, r10), ... will be appended to the
    file  output.dat where r1,..., r10 are random numbers.
 
    The steps for creating the process are:
    (1) Define the two sources:
            sequence_of_integers(out_stream), random_numbers(out_stream). 
    (2) Define the computational network:
        compute_func(in_streams, out_streams), where
        in_streams and out_streams are lists of streams.
        In this example, in_streams is a list consisting of
        two input streams, and out_streams is empty.
    (3) Call proc = shared_memory_process(...) to create a process
        proc.
    Next we make a multiprocess application consisting of the single
    process proc. Since the application has a single process it has
    no connections to other processes.
    (4) Call mp = Multiprocess(processes=[proc], connections=[])
        to make mp, a multiprocess application, and then call
        mp.run() to run the application.

    """
    import random

    # STEP 1: DEFINE SOURCES
    def sequence_of_integers(out_stream):
        # A simple source which outputs 1, 2, 3, 4, .... on
        # out_stream.
        def generate_sequence(state):
            return state + 1, state + 1

        # Return an agent which takes 10 steps, and
        # sleeps for 0.1 seconds between successive steps, and
        # puts the next element of the sequence in out_stream,
        # and starts the sequence with value 0. The elements on
        # out_stream will be 1, 2, 3, ...
        return source_func_to_stream(func=generate_sequence,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=10,
                                     state=0)

    def random_numbers(out_stream):
        # A simple source which outputs random numbers on
        # out_stream.

        # Return an agent which takes 10 steps, and sleeps for 0.1
        # seconds between successive steps, and puts a random number
        # on out_stream at each step.
        return source_func_to_stream(func=random.random,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=10)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        # in_streams and out_streams are lists of streams.
        # This is a simple example of a network of agents consisting
        # of two agents where the network has two input streams and no
        # output stream.
        # The first agent zips the two input streams and puts
        # the result on its output stream t which is internal to the
        # network.
        # The second agent puts values in its input stream t
        # on a file called output.dat.
        from sink import stream_to_file
        # t is an internal stream of the network
        t = Stream()
        zip_stream(in_streams=in_streams, out_stream=t)
        stream_to_file(in_stream=t, filename='output.dat')

    # STEP 3: CREATE THE PROCESS
    # Create a process with three threads:
    # two source threads and a compute thread.
    # The two source threads execute the functions sequence_of_integers
    # and random_numbers
    # The compute thread executes function compute_func.
    # The names of the inputs of compute_func are:
    # 'sequence_of_integers' and 'data'.
    # The source, sequence_of_integers, is connected to the
    # in_stream called 'sequence_of_integers'. The source
    # random_numbers is connected to the in_stream called
    # 'data'.
    proc = shared_memory_process(
        compute_func=compute_func,
        in_stream_names=['sequence_of_integers', 'data'],
        out_stream_names=[],
        connect_sources=[('sequence_of_integers', sequence_of_integers),
                         ('data', random_numbers)],
        connect_actuators=[],
        name='proc')
    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
Example No. 23
def single_process_single_source_example_1():
    """
    The single source generates 1, 2, 3, 4, .....
    The compute function multiplies this sequence by 10
    and puts the result in the file called test.dat
    num_steps is the number of values output by the source.
    For example, if num_steps is 4 and test.dat is empty before the
    function is called then, test.dat will contain 10, 20, 30, 40
    on separate lines.

    The steps to create the process are:
    (1) Define the source: sequence_of_integers(out_stream), where
        out_stream is a stream into which source data is output.
    (2) Define the computational network:
        compute_func(in_streams, out_streams), where
        in_streams and out_streams are lists of streams.
        In this example, in_streams is a list consisting of
        a single input stream, and out_streams is empty.
    (3) Call proc = shared_memory_process(...) to create a process
        proc.
    Next we make a multiprocess application consisting of the single
    process proc. Since the application has a single process it has
    no connections to other processes.
    (4) Call mp = Multiprocess(processes=[proc], connections=[])
        to make mp, a multiprocess application, and then call
        mp.run() to run the application.
    """

    # STEP 1: DEFINE SOURCES
    def sequence_of_integers(out_stream):
        """
        A simple source which outputs 1, 2, 3,... on
        out_stream.
        """
        def generate_sequence(state):
            return state + 1, state + 1

        # Return an agent which takes 4 steps, and
        # sleeps for 0.1 seconds between successive steps, and
        # puts the next element of the sequence in stream s,
        # and starts the sequence with value 0. The elements on
        # out_stream will be 1, 2, 3, ...
        return source_func_to_stream(func=generate_sequence,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=4,
                                     state=0)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    def compute_func(in_streams, out_streams):
        # A trivial example of a network of agents consisting
        # of two agents where the network has a single input
        # stream: in_stream.
        # The first agent applies function f to each element
        # of in_stream, and puts the result in its output stream t.
        # The second agent puts values in its input stream t
        # on a file called test.dat.
        # test.dat will contain 10, 20, 30, ....

        def f(x):
            return x * 10

        t = Stream()
        map_element(func=f, in_stream=in_streams[0], out_stream=t)
        stream_to_file(in_stream=t, filename='test.dat')

    # STEP 3: CREATE THE PROCESS
    # Create a process with two threads: a source thread and
    # a compute thread. The source thread executes the function
    # sequence_of_integers, and the compute thread executes
    # the function compute_func. The source is connected to
    # the in_stream called 'in' of compute_func.
    # The names of in_streams are arbitrary.
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[('in', sequence_of_integers)
                                                  ],
                                 connect_actuators=[],
                                 name='proc')

    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
Example No. 24
def twitter_parallel(consumer_key, consumer_secret, access_token,
                     access_token_secret, trackwords, num_steps):

    # PROCESS 0
    def source(out_stream):
        return twitter_to_stream(consumer_key, consumer_secret, access_token,
                                 access_token_secret, trackwords, out_stream,
                                 num_steps)

    def compute_func_0(in_streams, out_streams):
        map_element(lambda x: x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_0 = shared_memory_process(compute_func=compute_func_0,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[('in', source)],
                                   name='proc_0')

    # PROCESS 1
    def compute_func_1(in_streams, out_streams):
        def get_sentiment(tweet):
            tweet_text = get_text(tweet)
            sentiment_of_tweet = sentiment_of_text(tweet_text)
            return (tweet_text, sentiment_of_tweet)

        map_element(func=get_sentiment,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_1 = shared_memory_process(compute_func=compute_func_1,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='proc_1')

    # PROCESS 2
    def compute_func_2(in_streams, out_streams):
        map_element(func=followers_and_retweets_of_tweet,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_2 = shared_memory_process(compute_func=compute_func_2,
                                   in_stream_names=['in'],
                                   out_stream_names=['out'],
                                   connect_sources=[],
                                   name='proc_2')

    # PROCESS 3
    def compute_func_3(in_streams, out_streams):
        t = Stream()
        zip_stream(in_streams, out_stream=t)
        stream_to_file(in_stream=t, filename='result.dat')

    proc_3 = shared_memory_process(compute_func=compute_func_3,
                                   in_stream_names=['in_1', 'in_2'],
                                   out_stream_names=[],
                                   connect_sources=[],
                                   name='proc_3')

    mp = Multiprocess(processes=[proc_0, proc_1, proc_2, proc_3],
                      connections=[(proc_0, 'out', proc_1, 'in'),
                                   (proc_0, 'out', proc_2, 'in'),
                                   (proc_1, 'out', proc_3, 'in_1'),
                                   (proc_2, 'out', proc_3, 'in_2')])
    mp.run()
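
get_text, sentiment_of_text, and followers_and_retweets_of_tweet are not defined in this listing. Hypothetical stand-ins, assuming tweets arrive as dicts decoded from the Twitter streaming API and using the textblob package for sentiment, might look like this:

from textblob import TextBlob

def get_text(tweet):
    # Hypothetical: the tweet body from a streaming-API dict.
    return tweet.get('text', '') if isinstance(tweet, dict) else ''

def sentiment_of_text(text):
    # Hypothetical: polarity in [-1, 1] computed with textblob.
    return TextBlob(text).sentiment.polarity

def followers_and_retweets_of_tweet(tweet):
    # Hypothetical: follower count of the author and retweet count.
    return (tweet.get('user', {}).get('followers_count', 0),
            tweet.get('retweet_count', 0))
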
Example No. 25
def clock_offset_estimation_single_process_multiple_sources():
    """
    Another test of a single process with multiple sources.
    This process merges offsets received from two ntp sources and
    computes their average over a moving time window, and puts the
    result on a file, average.dat
    This process has two sources, each of which receives ntp offsets
    from ntp servers. The computational network consists of three
    agents: 
    (1) an agent that merges the two sources, and
    (2) an agent that computes the average of the merged stream over a
    window, and
    (3) a sink agent that puts the averaged stream in a file called
    'average.dat'.

    The steps for creating the process are:
    (1) Define the two sources:
            ntp_0(out_stream), ntp_1(out_stream). 
    (2) Define the computational network:
        compute_func(in_streams, out_streams), where
        in_streams and out_streams are lists of streams.
        In this example, in_streams is a list consisting of
        two input streams, and out_streams is empty.
    (3) Call proc = shared_memory_process(...) to create a process
        proc.
    Next we make a multiprocess application consisting of the single
    process proc. Since the application has a single process it has
    no connections to other processes.
    (4) Call mp = Multiprocess(processes=[proc], connections=[])
        to make mp, a multiprocess application, and then call
        mp.run() to run the application.


    """
    ntp_server_0 = '0.us.pool.ntp.org'
    ntp_server_1 = '1.us.pool.ntp.org'
    time_interval = 0.1
    num_steps = 20

    def average_of_list(a_list):
        if a_list:
            # Remove None elements from the list
            a_list = [i for i in a_list if i is not None]
            # Handle the non-empty list.
            if a_list:
                return sum(a_list) / float(len(a_list))
        # Handle the empty list
        return 0.0

    # STEP 1: DEFINE SOURCES
    def ntp_0(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_0, time_interval,
                                       num_steps)

    def ntp_1(out_stream):
        return offsets_from_ntp_server(out_stream, ntp_server_1, time_interval,
                                       num_steps)

    # STEP 2: DEFINE THE COMPUTATIONAL NETWORK OF AGENTS
    # This network has two input streams, one from each source
    # It has two internal streams: merged_stream and averaged_stream.
    # It has 3 agents.
    # (1) The network's two input streams feed a blend agent which
    # outputs merged_stream.
    # (2) The map_window agent reads merged_stream and outputs
    # averaged_stream.
    # (3) The stream_to_file agent inputs averaged_stream. This agent
    # is a sink which puts the stream into the file called
    # 'average.dat'. The file will contain floating point numbers that
    # are the averages of the specified sliding window.
    def compute_func(in_streams, out_streams):
        merged_stream = Stream('merge of two ntp server offsets')
        averaged_stream = Stream('sliding window average of offsets')
        blend(func=lambda x: x,
              in_streams=in_streams,
              out_stream=merged_stream)
        map_window(func=average_of_list,
                   in_stream=merged_stream,
                   out_stream=averaged_stream,
                   window_size=2,
                   step_size=1)
        stream_to_file(in_stream=averaged_stream, filename='average.dat')

    # STEP 3: CREATE THE PROCESS
    # Create a process with three threads:
    # two source threads and a compute thread.
    # The two source threads execute the functions ntp_0
    # and ntp_1
    # The compute thread executes function compute_func.
    # The names of the inputs of compute_func are:
    # 'source_0' and 'source_1'.
    # The source, ntp_0, is connected to the
    # in_stream called 'source_0'. The source
    # ntp_1 is connected to the in_stream called
    # 'source_1'
    proc = shared_memory_process(compute_func=compute_func,
                                 in_stream_names=['source_0', 'source_1'],
                                 out_stream_names=[],
                                 connect_sources=[('source_0', ntp_0),
                                                  ('source_1', ntp_1)],
                                 connect_actuators=[],
                                 name='proc')
    # STEP 4: CREATE AND RUN A MULTIPROCESS APPLICATION
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()
Example No. 26
def local_anomaly_process(filenames):
    """
    magnitude.txt

    Parameters
    ----------
    filenames: list
       A list of file names, where each file contains the data for
       one of the axes --- east, north, vertical --- generated by a
       sensor.

    """
    return shared_memory_process(compute_func=compute_func,
                                 in_stream_names=directions,
                                 out_stream_names=['out'],
                                 connect_sources=[
                                     (directions[i], source(filenames[i]))
                                     for i in range(len(directions))
                                 ],
                                 connect_actuators=[
                                     ('out', anomalies_actuator.actuate)
                                 ])


if __name__ == '__main__':
    # filenames has data recorded from east, north,
    # and vertical directions from a sensor
    filenames = ['S1.e.txt', 'S1.n.txt', 'S1.z.txt']
    proc = local_anomaly_process(filenames)
    mp = Multiprocess(processes=[proc], connections=[])
    mp.run()