def single_process_subscriber():
    """
    Run a VM containing a single process, proc_1, whose only input is
    a subscription to the published stream 'copy_of_source_list'.
    Every element arriving on that stream is appended to the file
    result.dat.  The process has no source threads, no actuators and
    no output streams, so connect_sources, connect_actuators and
    out_stream_names are all empty.
    """
    def write_input_to_file(in_streams, out_streams):
        # Dump the single input stream to result.dat.
        stream_to_file(in_streams[0], 'result.dat')

    proc_1 = distributed_process(
        compute_func=write_input_to_file,
        in_stream_names=['in'],
        out_stream_names=[],
        connect_sources=[],
        connect_actuators=[],
        name='proc_1')

    # Single-process VM: no connections between processes inside the
    # same shared-memory machine.  Elements received on the
    # subscription 'copy_of_source_list' feed the process input 'in'.
    vm_1 = VM(
        processes=[proc_1],
        connections=[],
        subscribers=[(proc_1, 'in', 'copy_of_source_list')])
    vm_1.start()
def two_process_publisher():
    """
    Build and start a VM with two pipelined processes.  process_0 has a
    source generating 0..9 and copies its input to its output;
    process_1 adds 10 to each element and the VM publishes its output
    under the name 'publication'.
    """
    source_list = range(10)

    def source(out_stream):
        # Source thread: replay source_list onto out_stream.
        return source_list_to_stream(source_list, out_stream)

    def relay(in_streams, out_streams):
        # Identity map: forward the input unchanged.
        map_element(func=lambda v: v,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_0 = distributed_process(
        compute_func=relay,
        in_stream_names=['in'],
        out_stream_names=['out'],
        connect_sources=[('in', source)],
        name='process_0')

    def add_ten(in_streams, out_streams):
        # Shift each element up by 10.
        map_element(func=lambda v: v + 10,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_1 = distributed_process(
        compute_func=add_ten,
        in_stream_names=['in'],
        out_stream_names=['out'],
        connect_sources=[],
        name='process_1')

    # proc_0's output feeds proc_1's input; proc_1's output is
    # published for subscribers in other VMs.
    vm_0 = VM(
        processes=[proc_0, proc_1],
        connections=[(proc_0, 'out', proc_1, 'in')],
        publishers=[(proc_1, 'out', 'publication')])
    vm_0.start()
def global_aggregator():
    # Runs a single-process VM that subscribes to two per-sensor
    # anomaly streams ('S1', 'S2') and aggregates them with
    # aggregate_anomalies over a timed window of size 2.
    def compute_func(in_streams, out_streams):
        """
        Parameters
        ----------
        in_streams: list of Stream
            in_streams is a list of anomaly streams with one stream
            from each sensor.  An anomaly stream is a sequence of 0.0
            and 1.0 where 0.0 indicates no anomaly and 1.0 indicates
            an anomaly.
        out_streams: list of Stream
            This list consists of a single stream that contains 0.0
            when no global anomaly across all sensors is detected and
            1.0 when a global anomaly is detected.

        NOTE(review): the process below declares out_stream_names=[],
        so out_streams will be empty here even though this docstring
        describes a single output stream — confirm aggregate_anomalies
        tolerates an empty out_streams list.
        """
        aggregate_anomalies(in_streams, out_streams, timed_window_size=2)

    proc = distributed_process(compute_func=compute_func,
                               in_stream_names=['in_1', 'in_2'],
                               out_stream_names=[],
                               connect_sources=[],
                               name='global aggregator')

    # Subscribe the two process inputs to the externally published
    # streams 'S1' and 'S2'; no intra-VM connections are needed.
    vm = VM(processes=[proc],
            connections=[],
            subscribers=[(proc, 'in_1', 'S1'),
                         (proc, 'in_2', 'S2')])
    vm.start()
class Debugger():
    # Interactive command-line front end for a VM (Python 2: uses
    # print statements and raw_input).  It installs VM hooks so that
    # execution pauses before each instruction and reports method
    # entry/exit.

    def __init__(self):
        self.vm = VM()
        # Pause for user input before each instruction, and trace
        # every method entry and exit.
        self.vm.add_hook(DebugHooks.PreInstruction, self.pre_instruction_hook)
        self.vm.add_hook(DebugHooks.PreMethod, self.pre_method_hook)
        self.vm.add_hook(DebugHooks.PostMethod, self.post_method_hook)

    def pre_instruction_hook(self, instruction):
        # Show the instruction about to execute, then prompt the user.
        print instruction.label + ':\t' + instruction.name
        self.handle_input()

    def pre_method_hook(self, method):
        print 'Entered method ' + method.methodDefinition.namespace + '::' + method.methodDefinition.name

    def post_method_hook(self, method):
        print 'Exited method ' + method.methodDefinition.namespace + '::' + method.methodDefinition.name

    def handle_input(self):
        # Read debugger commands until one of them resumes execution:
        #   s      - dump the VM stack, top element first
        #   q      - quit the debugger process
        #   g      - 'go': remove the per-instruction hook and resume
        #   m      - print the word 'method' (placeholder command)
        #   l NAME - load ../tests/NAME.il into the VM and run it
        #   anything else - return to the VM (single step)
        while True:
            r = raw_input('> ')
            if r == 's':
                for item in reversed(self.vm.stack.stack):
                    print item
            elif r == 'q':
                exit()
            elif r == 'g':
                self.vm.remove_hook(DebugHooks.PreInstruction, self.pre_instruction_hook)
                return
            elif r == 'm':
                print 'method'
            elif r.startswith('l '):
                filename = '../tests/' + r[2:] + ".il"
                try:
                    self.vm.load(filename)
                    print 'Loaded ' + filename
                    self.vm.start()
                    print 'Execution finished'
                    # The program's result is left on top of the stack.
                    print 'Return code: ' + str(self.vm.stack.pop())
                    return
                except IOError:
                    print 'Unable to load file'
                except Exception as e:
                    print 'Error: ' + str(e)
                    traceback.print_exc(file=sys.stdout)
            else:
                return

    def start(self):
        # Top-level loop: keep prompting even when no program is loaded.
        while True:
            self.handle_input()
class Debugger(): def __init__(self): self.vm = VM() self.vm.add_hook(DebugHooks.PreInstruction, self.pre_instruction_hook) self.vm.add_hook(DebugHooks.PreMethod, self.pre_method_hook) self.vm.add_hook(DebugHooks.PostMethod, self.post_method_hook) def pre_instruction_hook(self, instruction): print instruction.label + ':\t' + instruction.name self.handle_input() def pre_method_hook(self, method): print 'Entered method ' + method.methodDefinition.namespace + '::' + method.methodDefinition.name def post_method_hook(self, method): print 'Exited method ' + method.methodDefinition.namespace + '::' + method.methodDefinition.name def handle_input(self): while True: r = raw_input('> ') if r == 's': for item in reversed(self.vm.stack.stack): print item elif r == 'q': exit() elif r == 'g': self.vm.remove_hook(DebugHooks.PreInstruction, self.pre_instruction_hook) return elif r == 'm': print 'method' elif r.startswith('l '): filename = '../tests/' + r[2:] + ".il" try: self.vm.load(filename) print 'Loaded ' + filename self.vm.start() print 'Execution finished' print 'Return code: ' + str(self.vm.stack.pop()) return except IOError: print 'Unable to load file' except Exception as e: print 'Error: ' + str(e) traceback.print_exc(file=sys.stdout) else: return def start(self): while True: self.handle_input()
def single_process_publication_subscriber(): """ The application in this example consists of single process. The process has no source and no actuator. It has a single in_stream called 'in'. This example creates a virtual machine (vm) which subscribes to a stream called 'sequence'. The steps for creating a process are: (1) Define the sources. In this example we have no sources. (2) Define the actuators. In this example we have no actuators. (3) Define compute_func. This process has a single input stream and no output stream. (4) Create the process by calling distributed_process() Final step After creating all processes, specify the connections between processes and run the virtual machine.. """ # SKIP STEPS 1, 2 BECAUSE NO SOURCES OR ACTUATORS. # STEP 3: DEFINE COMPUTE_FUNC def g(in_streams, out_streams): def print_element(v): print 'stream element is ', v sink_element(func=print_element, in_stream=in_streams[0]) # STEP 4: CREATE PROCESSES proc_1 = distributed_process(compute_func=g, in_stream_names=['in'], out_stream_names=[], connect_sources=[], connect_actuators=[], name='proc_1') # FINAL STEP: CREATE A VM AND START IT. # Since this application has a single process it has no # connections between processes. The process, proc_1, subscribes # to a stream called 'sequence'. This process does not publish # streams. vm_1 = VM(processes=[proc_1], connections=[], subscribers=[(proc_1, 'in', 'sequence')]) vm_1.start()
def single_process_publisher():
    """
    One-process VM, proc_0, with a single source thread feeding the
    integers 0..9 into its input stream 'in' (one element every
    0.01 s).  The compute thread copies 'in' to 'out', and the VM
    publishes 'out' under the name 'copy_of_source_list'.  There are
    no actuators and, being a single-process VM, no inter-process
    connections.
    """
    source_list = range(10)

    def source(out_stream):
        # Replay source_list onto out_stream, one element per 0.01 s.
        return source_list_to_stream(
            source_list, out_stream, time_interval=0.01)

    def copy_stream(in_streams, out_streams):
        # Identity map from the single input to the single output.
        map_element(func=lambda v: v,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_0 = distributed_process(
        compute_func=copy_stream,
        in_stream_names=['in'],
        out_stream_names=['out'],
        connect_sources=[('in', source)],
        connect_actuators=[],
        name='proc_0')

    vm_0 = VM(
        processes=[proc_0],
        connections=[],
        publishers=[(proc_0, 'out', 'copy_of_source_list')])
    vm_0.start()
def two_process_subscriber():
    """
    VM with two pipelined processes that subscribes to the stream
    published under the name 'publication' (see two_process_publisher).
    process_2 multiplies each received element by 100 and process_3
    writes the result to the file result.dat.
    """
    def compute_2(in_streams, out_streams):
        # Scale each incoming element by 100.
        map_element(func=lambda x: x * 100,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # BUG FIX: this process was labeled 'process_3' and the next one
    # 'process_1' — the latter colliding with the publisher VM's
    # process_1.  The labels now match the variables proc_2 / proc_3.
    proc_2 = distributed_process(compute_func=compute_2,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[],
                                 name='process_2')

    def compute_3(in_streams, out_streams):
        # Dump the final stream to a file for later inspection.
        stream_to_file(in_streams[0], 'result.dat')

    proc_3 = distributed_process(compute_func=compute_3,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[],
                                 name='process_3')

    # proc_2's output feeds proc_3's input; proc_2's input comes from
    # the external publication.
    vm_1 = VM(processes=[proc_2, proc_3],
              connections=[(proc_2, 'out', proc_3, 'in')],
              subscribers=[(proc_2, 'in', 'publication')])
    vm_1.start()
def single_process_publisher():
    """
    One-process VM that streams the integers 0..9 (one every 0.2 s)
    through an identity compute function and publishes the output
    stream under the name 'copy_of_source_list'.
    """
    data = range(10)

    def source(out_stream):
        # Source thread: emit data one element every 0.2 seconds.
        return source_list_to_stream(data, out_stream,
                                     time_interval=0.2)

    def passthrough(in_streams, out_streams):
        # Copy the input stream to the output stream unchanged.
        map_element(func=lambda item: item,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    proc_0 = distributed_process(compute_func=passthrough,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc_0')

    vm_0 = VM(processes=[proc_0],
              connections=[],
              publishers=[(proc_0, 'out', 'copy_of_source_list')])
    vm_0.start()
def single_process_publication_producer(): """ The application in this example consists of single process. The process has a single source and no actuator. The single source generates 1, 2, 3, 4, ..... The compute function multiplies this sequence by 10 and puts the result in the file called test.dat num_steps is the number of values output by the source. For example, if num_steps is 4 and test.dat is empty before the function is called then, test.dat will contain 10, 20, 30, 40 on separate lines. The steps for creating a process are: (1) Define the sources. In this example we have two sources, source_0 and source_1 (2) Define the actuators. In this example we have no actuators. (3) Define compute_func (4) Create the process by calling distributed_process() Final step After creating all processes, specify the connections between processes and run the application. """ # STEP 1: DEFINE SOURCES def source(out_stream): """ A simple source which outputs 1, 2, 3,... on out_stream. """ def generate_sequence(state): return state + 1, state + 1 # Return an agent which takes 10 steps, and # sleeps for 0.1 seconds between successive steps, and # puts the next element of the sequence in stream s, # and starts the sequence with value 0. The elements on # out_stream will be 1, 2, 3, ... return source_func_to_stream(func=generate_sequence, out_stream=out_stream, time_interval=0.1, num_steps=10, state=0) # STEP 2: DEFINE ACTUATORS # This example has no actuators # STEP 3: DEFINE COMPUTE_FUNC def f(in_streams, out_streams): map_element(func=lambda x: 7 * x, in_stream=in_streams[0], out_stream=out_streams[0]) # STEP 4: CREATE PROCESSES # This process has a single input stream that we call 'in' and it # has no output streams. We connect the source to the input stream # called 'in'. proc_0 = distributed_process(compute_func=f, in_stream_names=['in'], out_stream_names=['out'], connect_sources=[('in', source)], connect_actuators=[], name='proc_0') # FINAL STEP: CREATE A VM AND START IT. 
# Since this application has a single process it has no # connections between processes. The process, proc_0, publishes # its output stream called 'out' to a stream called # 'sequence'. This process does not subscribe to streams. vm_0 = VM(processes=[proc_0], connections=[], publishers=[(proc_0, 'out', 'sequence')]) vm_0.start()
def two_process_publication_example_1():
    """
    The application in this example consists of two VMs.  It is a
    small extension of single_process_publication_example_1().
    The first VM has two processes, proc_0 and proc_1.  The second
    VM has two processes, proc_2 and proc_3.

    THE FIRST VM
    PROC_0
    proc_0 has a single source and no actuator.  The source generates
    1, 2, 3, 4, ... (num_steps values).  The compute function has a
    single in_stream and a single out_stream and merely passes its
    in_stream to its out_stream.
    PROC_1
    proc_1 has no sources or actuators.  Its compute function has a
    single in_stream and a single out_stream; it multiplies its input
    elements by 10 and puts the results on its out_stream.  The VM
    publishes proc_1's output under the name 'sequence'.

    THE SECOND VM
    PROC_2
    proc_2 has no sources or actuators.  Its compute function has a
    single in_stream and a single out_stream; it multiplies elements
    of its in_stream by 2 and places the results on its out_stream.
    The VM subscribes proc_2's input to the publication 'sequence'.
    PROC_3
    proc_3 has no sources or actuators.  Its compute function has a
    single in_stream and no out_stream; it prints its in_stream.

    The steps for creating a process are:
    (1) Define the sources.
    (2) Define the actuators.
    (3) Define compute_func.
    (4) Create the process by calling distributed_process().
    (5) Create a VM after creating all processes in the VM, by
        specifying the connections between processes within the VM
        and the streams the VM publishes and subscribes to.
    (6) Start the VMs.
    (7) Join the VMs.  Skip this step if a VM is persistent.
    """
    #------------------------
    #------------------------
    # VM_0
    #------------------------
    #------------------------

    #------------------------
    # proc_0 in VM_0
    #------------------------
    # STEP 1: DEFINE SOURCES
    def source(out_stream):
        """
        A simple source which outputs 1, 2, 3,... on out_stream.
        """
        def generate_sequence(state):
            # Returns (next element, next state).
            return state + 1, state + 1
        # Return an agent which takes 4 steps, sleeps for 0.1 seconds
        # between successive steps, puts the next element of the
        # sequence on out_stream, and starts with state 0, so
        # out_stream carries 1, 2, 3, 4.
        return source_func_to_stream(func=generate_sequence,
                                     out_stream=out_stream,
                                     time_interval=0.1,
                                     num_steps=4, state=0)

    # STEP 2: DEFINE ACTUATORS
    # This example has no actuators.

    # STEP 3: DEFINE COMPUTE_FUNC
    def f(in_streams, out_streams):
        # Identity map: pass the input through unchanged.
        map_element(func=lambda x: x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 4: CREATE PROCESSES
    # This process has a single input stream 'in' fed by the source,
    # and a single output stream 'out'.
    proc_0 = distributed_process(compute_func=f,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[('in', source)],
                                 connect_actuators=[],
                                 name='proc_0')

    #------------------------
    # proc_1 in VM_0
    #------------------------
    # STEP 1: DEFINE SOURCES
    # Skip this step since proc_1 has no sources.
    # STEP 2: DEFINE ACTUATORS
    # Skip this step since proc_1 has no actuators.
    # STEP 3: DEFINE COMPUTE_FUNC
    def g(in_streams, out_streams):
        # Multiply every input element by 10.
        map_element(func=lambda x: 10 * x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 4: CREATE PROCESSES
    proc_1 = distributed_process(compute_func=g,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[],
                                 connect_actuators=[],
                                 name='proc_1')

    # STEP 5: CREATE VM
    # proc_0's output feeds proc_1's input; proc_1's output is
    # published under the name 'sequence'.
    vm_0 = VM(processes=[proc_0, proc_1],
              connections=[(proc_0, 'out', proc_1, 'in')],
              publishers=[(proc_1, 'out', 'sequence')])

    #------------------------
    #------------------------
    # VM_1
    #------------------------
    #------------------------

    #------------------------
    # proc_2 in VM_1
    #------------------------
    # STEP 1: DEFINE SOURCES
    # Skip this step since proc_2 has no sources.
    # STEP 2: DEFINE ACTUATORS
    # Skip this step since proc_2 has no actuators.
    # STEP 3: DEFINE COMPUTE_FUNC
    def h(in_streams, out_streams):
        # Double every input element.
        map_element(func=lambda x: 2 * x,
                    in_stream=in_streams[0],
                    out_stream=out_streams[0])

    # STEP 4: CREATE PROCESSES
    proc_2 = distributed_process(compute_func=h,
                                 in_stream_names=['in'],
                                 out_stream_names=['out'],
                                 connect_sources=[],
                                 connect_actuators=[],
                                 name='proc_2')

    #------------------------
    # proc_3 in VM_1
    #------------------------
    # STEP 1: DEFINE SOURCES
    # Skip this step since proc_3 has no sources.
    # STEP 2: DEFINE ACTUATORS
    # Skip this step since proc_3 has no actuators.
    # STEP 3: DEFINE COMPUTE_FUNC
    def pr(in_streams, out_streams):
        # Print every element of the input stream.
        def print_element(v):
            print 'stream element is ', v
        sink_element(func=print_element, in_stream=in_streams[0])

    # STEP 4: CREATE PROCESSES
    proc_3 = distributed_process(compute_func=pr,
                                 in_stream_names=['in'],
                                 out_stream_names=[],
                                 connect_sources=[],
                                 connect_actuators=[],
                                 name='proc_3')

    # STEP 5: CREATE VM
    # proc_2's output feeds proc_3's input; proc_2's input subscribes
    # to the publication 'sequence' from vm_0.
    vm_1 = VM(processes=[proc_2, proc_3],
              connections=[(proc_2, 'out', proc_3, 'in')],
              subscribers=[(proc_2, 'in', 'sequence')])

    # STEP 6: START PROCESSES
    vm_0.start()
    vm_1.start()

    # STEP 7: JOIN PROCESSES
    vm_0.join()
    vm_1.join()
def detect_large_magnitude(sensor_name, filenames):
    """
    Build and start a one-process VM that reads triaxial sensor data
    (e, n, z components) from the files in `filenames`, detects
    magnitude anomalies, and publishes the quenched anomaly stream
    under the name `sensor_name`.

    Parameters
    ----------
    sensor_name: str
        Publication name for the output anomaly stream.
    filenames: list of str
        Files of floats, one per direction ('e', 'n', 'z').
    """
    # ----------------------------------------------------------------
    # COMPUTE FUNCTION
    # ----------------------------------------------------------------
    def compute_func(in_streams, out_streams):
        """
        Detects anomalies in streams generated by triaxial sensors.

        Parameters
        ----------
        in_streams: list of Stream
            Three streams of measurements in the e, n and z (east,
            north, vertical) directions generated by one triaxial
            sensor.
        out_streams: list of Stream
            A single stream whose elements are 1.0 when an anomaly
            was detected in in_streams and 0.0 otherwise.
        """
        # Internal streams wiring the agent pipeline together.
        vector_magnitudes = Stream('magnitudes')
        raw_anomaly_times = Stream('prior quench')
        quenched_anomaly_times = out_streams[0]

        # vector components -> stream of magnitudes
        magnitude_of_vector(in_streams, out_stream=vector_magnitudes)
        # magnitudes -> anomaly indicator (before quenching)
        simple_anomalies(in_stream=vector_magnitudes,
                         out_stream=raw_anomaly_times,
                         threshold=0.005)
        # Suppress repeated reports within the quench window.
        quench(in_stream=raw_anomaly_times,
               out_stream=quenched_anomaly_times,
               QUENCH_TIME=4)
        # Keep a copy of the final anomaly stream for later analysis.
        stream_to_file(quenched_anomaly_times, 'local_anomalies.txt')

    # ----------------------------------------------------------------
    # SOURCES
    # ----------------------------------------------------------------
    def source(filename):
        """
        Return a source function that replays the floats in the file
        `filename` onto a stream with no delay between elements
        (time_interval=0) and no step limit (num_steps=None, i.e.
        the entire file is emitted).
        """
        return source_float_file(filename,
                                 time_interval=0,
                                 num_steps=None).source_func

    directions = ['e', 'n', 'z']
    # One source per direction; filenames must supply one file per
    # direction, in the same order.
    proc_0 = distributed_process(
        compute_func=compute_func,
        in_stream_names=directions,
        out_stream_names=['out'],
        connect_sources=[(direction, source(fname))
                         for direction, fname in zip(directions, filenames)])

    vm_0 = VM(processes=[proc_0],
              connections=[],
              publishers=[(proc_0, 'out', sensor_name)])
    vm_0.start()
def run_test(self, fileName):
    # Load the named IL test file from ./tests/ into a fresh VM,
    # execute it, and return the value left on top of the VM stack.
    machine = VM()
    test_path = os.getcwd() + '/tests/' + fileName
    machine.load(test_path)
    machine.start()
    return machine.stack.pop()