def echo_func(self, in_streams, out_streams):
    """Wire up the agent network that adds an echo to a sound stream.

    in_streams[0] is the original sound; out_streams[0] carries the echo.
    The heard sound (original plus echo) is an internal stream.
    """
    # IDENTIFY INPUT AND OUTPUT STREAMS.
    # Local names for the external streams; purely a readability aid.
    original_sound = in_streams[0]
    echo = out_streams[0]
    # Prime the echo with self.delay zeros so the echo lags the
    # original sound by the configured delay.
    echo.extend([0] * self.delay)

    # CREATE INTERNAL STREAMS.
    original_sound_plus_echo = Stream(name='original sound plus echo')

    # CREATE AGENTS
    # Agent that sums the original sound and the echo to produce the
    # original sound plus the echo.
    zip_map(func=sum,
            in_streams=[original_sound, echo],
            out_stream=original_sound_plus_echo)
    # Agent that produces the echo by feeding the summed sound back
    # through the attenuation vector (a feedback loop through `echo`).
    window_dot_product(
        in_stream=original_sound_plus_echo,
        out_stream=echo,
        multiplicand_vector=self.attenuation_vector)
    # Agents that record the sounds in files for later inspection.
    stream_to_file(in_stream=echo,
                   filename=self.echo_name + '.txt')
    stream_to_file(in_stream=original_sound_plus_echo,
                   filename='original_sound_plus_' + self.echo_name + '.txt')
def compute(in_streams):
    """Detect magnitude anomalies in a triaxial (e, n, z) sensor feed.

    Each input stream is demeaned over a sliding window, the three
    demeaned components are combined into a vector magnitude, and the
    magnitude is thresholded into a 1.0/0.0 anomaly stream.  Magnitudes
    and anomalies are written to files.

    Fix: removed the local list `filenames`, which was assigned but
    never used anywhere in the function.
    """
    def subtract_mean(window):
        # Latest sample minus the window mean (float division kept
        # explicit for Python 2 compatibility).
        return window[-1] - sum(window) / float(len(window))

    def magnitude_of_vector(coordinates):
        # Euclidean norm of one (e, n, z) sample.
        return math.sqrt(sum([v * v for v in coordinates]))

    def simple_anomaly(value, threshold):
        # 1.0 flags a magnitude above threshold, 0.0 otherwise.
        if value > threshold:
            return 1.0
        else:
            return 0.0

    zero_mean_streams = [Stream('zero mean e'),
                         Stream('zero mean n'),
                         Stream('zero mean z')]
    magnitude_stream = Stream('magnitude')
    anomaly_stream = Stream('anomalies')

    # One demeaning agent per axis.
    for i in range(3):
        map_window(func=subtract_mean,
                   in_stream=in_streams[i],
                   out_stream=zero_mean_streams[i],
                   window_size=8000, step_size=1)
    # Combine the three demeaned axes into a magnitude stream.
    zip_map(func=magnitude_of_vector,
            in_streams=zero_mean_streams,
            out_stream=magnitude_stream)
    # Threshold magnitudes into an anomaly indicator stream.
    map_element(func=simple_anomaly,
                in_stream=magnitude_stream,
                out_stream=anomaly_stream,
                threshold=0.1)
    # Persist results for offline analysis.
    stream_to_file(in_stream=magnitude_stream, filename='magnitude.txt')
    stream_to_file(in_stream=anomaly_stream, filename='anomaly.txt')
def compute_func(in_streams, out_streams):
    """Run a stateful example agent over the source stream and verify
    its output against a precomputed expected list."""
    eg = example_class(multiplicand=2)
    # Expected output of eg.step for the test source.
    # NOTE(review): values inferred from the original test fixture;
    # confirm against example_class if the source data changes.
    expected = [0, 2, 5, 9, 14, 20, 27, 35, 44, 54]
    t = Stream()
    map_element(func=eg.step, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=expected)
    stream_to_file(in_stream=t, filename='map_element_example_3.dat')
def aggregate_anomalies(in_streams, out_stream, timed_window_size):
    """
    Parameters
    ----------
    in_streams: list of Stream
       Each stream in the list is a stream of floats with values
       1.0 or 0.0.
    out_stream: Stream
       Stream of floats. out_stream[j] counts the streams s in
       in_streams for which s[j-window: j] contains at least one 1.0.
       The window size lets anomalies in different sensor streams be
       treated as simultaneous when they fall within window_size of
       each other.
    timed_window_size: the duration of that aggregation window.
    """
    aggregator = aggregate_large_magnitudes(num_streams=2, threshold=2)
    zipped_stream = Stream('time zipped stream')
    global_anomalies_stream = Stream('global anomalies stream')

    # Time-align the per-sensor anomaly streams, then aggregate them
    # over a timed window.
    timed_zip_agent(in_streams=in_streams, out_stream=zipped_stream)
    timed_window(func=aggregator.func,
                 in_stream=zipped_stream,
                 out_stream=global_anomalies_stream,
                 window_duration=timed_window_size,
                 step_time=1)

    def get_time(timed_element):
        # Keep only the timestamp of each high-magnitude event when
        # writing to file.
        timestamp, value = timed_element
        time_of_high_magnitude, num_high_magnitude = value
        return time_of_high_magnitude

    stream_to_file(in_stream=global_anomalies_stream,
                   filename='global_anomalies.txt',
                   element_function=get_time)
def compute_func(in_streams, out_streams):
    """Uppercase every element of the source stream and verify the
    result against the same transformation applied to source_list.

    Fixes for Python 3 compatibility (both changes also run unchanged
    under Python 2):
    - string.upper was removed from the string module in Python 3;
      use the str method directly.
    - map() returns a lazy iterator in Python 3; materialize it so
      check_list is a concrete list.
    """
    f = str.upper
    check_list = list(map(f, source_list))
    t = Stream()
    map_element(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(in_stream=t, filename='map_element_example_2.dat')
def compute_func(in_streams, out_streams):
    """Multiply every element of the source stream by 10 and verify
    the output against the same function applied to source_list.

    Fix: map() returns a lazy iterator in Python 3, so check_list is
    materialized with list(); behavior is unchanged under Python 2.
    """
    def f(x):
        return x * 10

    check_list = list(map(f, source_list))
    t = Stream()
    map_element(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(in_stream=t, filename='map_element_example_1.dat')
def compute_func(in_streams, out_streams):
    """Apply a list-to-list transform (double odd values, halve even
    values) to the source stream and verify the result."""
    def f(lst):
        # Double the odd entries, halve the even ones.
        result = []
        for v in lst:
            if v % 2:
                result.append(v * 2)
            else:
                result.append(v / 2)
        return result

    check_list = f(source_list)
    t = Stream()
    map_list(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(
        in_stream=t,
        filename='single_process_single_source_map_list_example_2.dat')
def compute_func(in_streams, out_streams):
    """Keep only elements below 5 from the source stream and verify
    the output against the same filter applied to source_list.

    Fix: filter() returns a lazy iterator in Python 3, so check_list
    is materialized with list(); behavior is unchanged under Python 2.
    """
    def f(x):
        return x < 5

    check_list = list(filter(f, source_list))
    t = Stream()
    filter_element(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(
        in_stream=t,
        filename='single_process_single_source_filter_example_1.dat')
def compute_func(in_streams, out_streams):
    """Merge the NTP offset streams, smooth them with a sliding-window
    average, and write the averages to a file."""
    merged_stream = Stream('merge of two ntp server offsets')
    averaged_stream = Stream('sliding window average of offsets')

    # Interleave all input offsets into a single stream.
    blend(func=lambda x: x,
          in_streams=in_streams,
          out_stream=merged_stream)
    # Average each pair of consecutive merged offsets.
    map_window(func=average_of_list,
               in_stream=merged_stream,
               out_stream=averaged_stream,
               window_size=2, step_size=1)
    stream_to_file(in_stream=averaged_stream, filename='average.dat')
def compute_func(in_streams, out_streams):
    """Sum non-overlapping pairs of source elements and verify the
    output against the expected list."""
    # Expected sums for consecutive non-overlapping pairs of the source.
    expected = [1, 5, 9, 13, 17]
    pair_sums = Stream()
    map_window(func=sum,
               in_stream=in_streams[0],
               out_stream=pair_sums,
               window_size=2, step_size=2)
    check_correctness_of_output(in_stream=pair_sums, check_list=expected)
    stream_to_file(
        in_stream=pair_sums,
        filename='single_process_single_source_map_window_example_1.dat')
def compute_func(in_streams, out_streams):
    """Stateful filter example: pass element v iff v <= n, where the
    threshold n starts at 0 and advances by one per element."""
    def less_than_n(v, n):
        # Returns (keep?, next state). The element passes through when
        # it is at most the current threshold n.
        keep = (v <= n)
        return keep, n + 1

    expected = [1, 3, 5, 7, 9]
    filtered = Stream()
    filter_element(func=less_than_n,
                   in_stream=in_streams[0],
                   out_stream=filtered,
                   state=0)
    check_correctness_of_output(in_stream=filtered, check_list=expected)
    stream_to_file(in_stream=filtered,
                   filename='filter_element_example_1.dat')
def compute_func(in_streams, out_streams):
    """Run the Misra-Gries heavy-hitters algorithm on the source
    stream and record its reports in a file."""
    # Internal stream: output of the misra_gries agent, input of the
    # stream_to_file agent.
    misra_gries_output_stream = Stream('Misra Gries output')

    # Heavy-hitters agent: reads from the source, reports every
    # reporting_window_size elements.
    misra_gries(k=num_heavy_hitters,
                in_stream=in_streams[0],
                out_stream=misra_gries_output_stream,
                M=reporting_window_size)
    # Persist the reports.
    stream_to_file(in_stream=misra_gries_output_stream,
                   filename=out_filename)
def compute_func(in_streams, out_streams):
    """Feed the source through two probabilistic membership structures
    (Bloom filter and Count-Min sketch) and record both outputs."""
    bloom_out = Stream('Bloom output stream')
    sketch_out = Stream('CountMinSketch output stream')

    # One membership agent per structure, both reading the same source.
    membership_in_stream(in_stream=in_streams[0],
                         out_stream=bloom_out,
                         membership_object=bloom_filter)
    membership_in_stream(in_stream=in_streams[0],
                         out_stream=sketch_out,
                         membership_object=count_min_sketch)
    # Persist both result streams.
    stream_to_file(in_stream=bloom_out,
                   filename=bloom_filter_filename)
    stream_to_file(in_stream=sketch_out,
                   filename=count_min_sketch_filename)
def compute_func(in_streams, out_streams):
    """map_list example where f concatenates each delivered list with
    itself; the expected output duplicates every source element."""
    def f(lst):
        return lst + lst

    # Each source element appears twice in the output.
    expected = [
        0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9
    ]
    doubled = Stream()
    map_list(func=f, in_stream=in_streams[0], out_stream=doubled)
    check_correctness_of_output(in_stream=doubled, check_list=expected)
    stream_to_file(
        in_stream=doubled,
        filename='single_process_single_source_map_list_example_4.dat')
def compute_func(in_streams, out_streams):
    """Composed agent with two input streams and no output stream.

    Component 1 zips the two inputs onto an internal stream; component 2
    writes that internal stream to the file 'output.dat'.
    """
    from sink import stream_to_file
    # Internal stream of the network.
    zipped = Stream()
    zip_stream(in_streams=in_streams, out_stream=zipped)
    stream_to_file(in_stream=zipped, filename='output.dat')
def compute_func(in_streams, out_streams):
    """map_list example where f filters each delivered list, keeping
    values below 5; output is verified against f(source_list).

    Fix: under Python 3, filter() returns a lazy iterator, so the
    original f returned an iterator instead of a list — breaking both
    the stream extension and the reuse of check_list.  list() makes the
    result concrete; behavior is unchanged under Python 2.
    """
    def h(v):
        return v < 5

    def f(lst):
        return list(filter(h, lst))

    check_list = f(source_list)
    t = Stream()
    map_list(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(
        in_stream=t,
        filename='single_process_single_source_map_list_example_3.dat')
def compute_func(in_streams, out_streams):
    """Composed agent with one input stream and no output stream.

    Component 1 maps f over the input onto an internal stream;
    component 2 writes that stream to 'test.dat', which will therefore
    contain 10, 20, 30, ....
    """
    def f(x):
        return x * 10

    scaled = Stream()
    map_element(func=f, in_stream=in_streams[0], out_stream=scaled)
    stream_to_file(in_stream=scaled, filename='test.dat')
def compute_func(in_streams, out_streams):
    """
    Detects anomalies in streams generated by triaxial sensors.

    Parameters
    ----------
    in_streams: list of Stream
       Three streams of measurements in the e, n, and z (east, north,
       vertical) directions produced by a triaxial sensor.
    out_streams: list of Stream
       A single stream whose elements are 1.0 when an anomaly was
       detected in in_streams and 0.0 otherwise.
    """
    # ------------------------------------------------------------------
    # DECLARE INTERNAL STREAMS
    # ------------------------------------------------------------------
    # Magnitudes of the (e, n, z) vectors.
    magnitudes = Stream('magnitudes')
    anomaly_times_before_quenching = Stream('prior quench')
    anomaly_times_after_quenching = out_streams[0]

    # ------------------------------------------------------------------
    # CREATE AGENTS
    # ------------------------------------------------------------------
    # Vector components -> stream of vector magnitudes.
    magnitude_of_vector(in_streams, out_stream=magnitudes)
    # Magnitudes -> raw anomaly stream.
    simple_anomalies(in_stream=magnitudes,
                     out_stream=anomaly_times_before_quenching,
                     threshold=0.005)
    # Suppress anomalies that are too close together in time.
    quench(in_stream=anomaly_times_before_quenching,
           out_stream=anomaly_times_after_quenching,
           QUENCH_TIME=4)
    # Copy the final anomaly stream to a file for later analysis.
    stream_to_file(anomaly_times_after_quenching, 'local_anomalies.txt')
def compute(in_stream):
    """Subtract a 50-sample sliding mean from the input stream and
    write the demeaned values to 'zero_mean_z.txt'."""
    def subtract_mean(window):
        # Latest sample minus the window mean.
        return window[-1] - sum(window) / float(len(window))

    zero_mean_stream = Stream('zero mean')
    input_stream = Stream('input')

    # Demeaning agent.
    map_window(func=subtract_mean,
               in_stream=in_stream,
               out_stream=zero_mean_stream,
               window_size=50, step_size=1)
    # Delayed copy of the raw input, aligned with the demeaned stream.
    # NOTE(review): input_stream is built but never written to a file
    # here — confirm whether a consumer elsewhere uses it.
    map_window(func=lambda window: window[-1],
               in_stream=in_stream,
               out_stream=input_stream,
               window_size=50, step_size=1)
    stream_to_file(in_stream=zero_mean_stream,
                   filename='zero_mean_z.txt')
def g(in_streams, out_streams):
    """
    Parameters
    ----------
    in_streams: list of Stream
       One anomaly stream per sensor.  An anomaly stream is a sequence
       of 0.0 and 1.0, where 1.0 indicates an anomaly.
    out_streams: list of Stream
       A single stream containing 1.0 when a global anomaly across
       sensors is detected and 0.0 otherwise.
    """
    # DECLARE STREAMS
    # Internal stream used only within g.
    regional_anomalies = Stream('Regional anomalies')

    # CREATE AGENTS
    # 1. Aggregation agent: a regional anomaly is declared when more
    #    than one sensor window contains a local anomaly.
    def aggregate(windows):
        sensors_with_anomaly = sum(1 for window in windows if any(window))
        if sensors_with_anomaly > 1:
            return 1.0
        else:
            return 0.0

    merge_window(func=aggregate,
                 in_streams=in_streams,
                 out_stream=regional_anomalies,
                 window_size=250, step_size=1,
                 initial_value=0.0)

    # 2. Agents that copy the streams to files.
    for i in range(len(in_streams)):
        stream_to_file(in_streams[i], 'Anomalies_' + str(i + 1) + '_.txt')
    stream_to_file(regional_anomalies, 'regional_anomalies.txt')
def aggregate(self, in_streams, out_streams):
    """Combine the original sound (in_streams[0]) with all echoes
    (in_streams[1:]) into the heard sound (out_streams[1]) and copy the
    original sound to out_streams[0]; record both in files."""
    # This agent network has no internal streams.

    # CREATE AGENTS
    # Heard sound = original sound + all echoes.
    zip_map(sum, in_streams, out_streams[1])
    # Pass the original sound through to an output stream.
    copy_stream(in_streams[0], out_streams[0])
    # Record the sounds for later inspection.
    stream_to_file(out_streams[1], self.output_file_name)
    stream_to_file(in_streams[0], 'original_sound.txt')
def compute_func(in_streams, out_streams):
    """Add a single attenuated, delayed reflection (simple reverb) to
    the input sound and write the result to 'Reverb.dat'.

    Fixes/cleanups:
    - The inner function's parameter shadowed the enclosing
      `in_streams` argument; renamed to `lst`.
    - Replaced the deepcopy-then-mutate-in-place loop with a list
      comprehension (samples are numbers, so the result is identical).
    - Removed commented-out debug print.
    """
    # F1 - Amplitude Adjustment
    def delay_and_attenuate(lst):
        # Scale every sample by the attenuation factor alpha.
        return [v * alpha for v in lst]

    # The echo stream starts with `delay` zeros so the reflection lags
    # the source by that many samples.
    echo = Stream(initial_value=[0] * delay)
    heard = Stream()
    map_list(func=delay_and_attenuate,
             in_stream=in_streams[0],
             out_stream=echo)
    # Heard sound = original + attenuated, delayed copy.
    zip_map(sum, [echo, in_streams[0]], heard)
    stream_to_file(in_stream=heard, filename='Reverb.dat')
def compute_func(in_streams, out_streams):
    """Echo network: the heard sound is the original sound plus an
    echo, and the echo is the heard sound passed through the
    attenuation vector (a feedback loop)."""
    # Name the external streams for convenience.
    original_sound = in_streams[0]
    heard_sound = out_streams[0]
    # Internal stream: the echo, primed with `delay` zeros so it lags
    # the heard sound.
    echo = Stream(name='echo', initial_value=[0] * delay)

    # Agent that forms the heard sound from original sound and echo.
    zip_map(func=sum,
            in_streams=[original_sound, echo],
            out_stream=heard_sound)
    # Agent that regenerates the echo from the heard sound.
    window_dot_product(in_stream=heard_sound,
                       out_stream=echo,
                       multiplicand_vector=attenuation_vector)
    # Agents that record the sounds in files.
    stream_to_file(in_stream=heard_sound, filename='heard.txt')
    stream_to_file(in_stream=echo, filename='echo.txt')
    stream_to_file(in_stream=original_sound,
                   filename='original_sound.txt')
def compute(in_streams, out_streams):
    """Merge the NTP server offset streams into one stream and record
    it in 'offsets.dat'."""
    merged_stream = Stream('merge of two ntp server offsets')
    blend(func=identity,
          in_streams=in_streams,
          out_stream=merged_stream)
    stream_to_file(in_stream=merged_stream, filename='offsets.dat')
def compute_1(in_streams, out_streams):
    """Scale every input element by 200 and record the results in
    'result.dat'."""
    result_stream = Stream('result of computation')
    map_element(func=lambda x: 200 * x,
                in_stream=in_streams[0],
                out_stream=result_stream)
    stream_to_file(in_stream=result_stream, filename='result.dat')
def compute_1(in_streams, out_streams):
    """Copy the single input stream verbatim into 'result_1.dat'."""
    stream_to_file(in_stream=in_streams[0], filename='result_1.dat')
def compute_3(in_streams, out_streams):
    """Zip the input streams together and record the tuples in
    'result_2.dat'."""
    zipped = Stream()
    zip_stream(in_streams, zipped)
    stream_to_file(zipped, 'result_2.dat')
def compute_func_3(in_streams, out_streams):
    """Zip the input streams together and record the tuples in
    'result.dat'."""
    zipped = Stream()
    zip_stream(in_streams, out_stream=zipped)
    stream_to_file(in_stream=zipped, filename='result.dat')
def g(in_streams, out_streams):
    """
    Compute Function for Picker

    Parameters
    ----------
    in_streams: list of input Streams passed from sensor reader
        process (f)
        in_streams[0] - acceleration N
        in_streams[1] - acceleration E
        in_streams[2] - acceleration Z
        in_streams[3] - timestamp
    """
    # DECLARE STREAMS
    adjusted_acc = [
        Stream('adjusted_acc_{}'.format(i))
        for i in range(len(in_streams) - 1)
    ]
    adjusted_timestamp = Stream('adjusted_timestamp')
    merged_acc = Stream('acc_merged')
    filtered_acc = Stream('filtered_acc')
    quenched_acc = Stream('quenched_acc')

    # DEFINE AGENTS
    # 1. ADJUST LTA - subtract long-term average from sample data.
    def adjust_lta(window):
        return abs(window[-1] - sum(window) / len(window))

    for i in range(len(in_streams) - 1):
        map_window(func=adjust_lta,
                   in_stream=in_streams[i],
                   out_stream=adjusted_acc[i],
                   window_size=LTA_COUNT, step_size=1)

    # 2. ADJUST TIMESTAMP - the timestamp corresponding to each window.
    def adjust_timestamp(window):
        return window[-1]

    map_window(func=adjust_timestamp,
               in_stream=in_streams[-1],
               out_stream=adjusted_timestamp,
               window_size=LTA_COUNT, step_size=1)

    # 3. ZIP STREAM - combine acceleration and timestamp streams.
    zip_stream(in_streams=adjusted_acc + [adjusted_timestamp],
               out_stream=merged_acc)

    # 4. DETECT ANOMALY - keep only samples with a large acceleration
    #    on at least one axis.
    def detect_anomaly(v):
        return any(x > PICKER_THRESHOLD for x in v[:-1])

    filter_element(func=detect_anomaly,
                   in_stream=merged_acc,
                   out_stream=filtered_acc)

    # 5. QUENCH PICKER - drop picks that arrive too soon after the
    #    previous one; state holds the last reported timestamp.
    def quench_picker(v, state):
        timestamp = v[3]
        if timestamp - state < MINIMUM_REPICK_INTERVAL_SECONDS:
            return _no_value, state
        else:
            state = timestamp
            return v, state

    map_element(func=quench_picker,
                in_stream=filtered_acc,
                out_stream=quenched_acc,
                state=0)

    # 6. STREAM RESULTS TO FILE - for test purposes.
    stream_to_file(quenched_acc, './phidget_data.txt')
def detect_large_magnitudes(in_streams, out_streams):
    """
    Detects anomalies in streams generated by triaxial sensors.

    Parameters
    ----------
    in_streams: list of Stream
       Three streams of measurements in the e, n, and z (east, north,
       vertical) directions produced by a triaxial sensor.
    out_streams: list of Stream
       A single stream whose elements are 1.0 when an anomaly was
       detected in in_streams and 0.0 otherwise.
    """
    # ------------------------------------------------------------------
    # DECLARE INTERNAL STREAMS
    # ------------------------------------------------------------------
    # Source streams with their means subtracted.
    demeaned = [Stream('demeaned_' + str(i)) for i in range(3)]
    # Magnitudes of the demeaned vector samples.
    magnitudes = Stream('magnitudes')
    # Integer times of anomalously high magnitudes, before and after
    # discarding anomalies that are too close together in time.
    anomaly_times_before_quenching = Stream('prior quench')
    anomaly_times_after_quenching = Stream('after quench')

    # ------------------------------------------------------------------
    # CREATE AGENTS
    # ------------------------------------------------------------------
    # Subtract the mean from each source stream.
    for i in range(3):
        subtract_mean(in_stream=in_streams[i],
                      out_stream=demeaned[i],
                      window_size=100, step_size=100)
    # Combine the demeaned components into a stream of vector
    # magnitudes.
    magnitude_of_vector(in_streams=demeaned, out_stream=magnitudes)
    # Emit the timestamps at which the magnitude is anomalously high.
    simple_anomalies(in_stream=magnitudes,
                     out_stream=anomaly_times_before_quenching,
                     MAGNITUDE_THRESHOLD=0.0001)
    # Discard anomaly timestamps that are closer together than
    # QUENCH_TIME.
    quench(in_stream=anomaly_times_before_quenching,
           out_stream=anomaly_times_after_quenching,
           QUENCH_TIME=5)
    # Copy the quenched anomaly stream to a file for later analysis.
    stream_to_file(anomaly_times_after_quenching, 'local_anomalies.txt')