def split_list_f(func, in_stream, num_out_streams, state=None, *args, **kwargs):
    """Create `num_out_streams` named output streams, wire a split_list
    agent that splits `in_stream` through `func`, and return the streams.

    Each output stream is named func.__name__ + in_stream.name + index.
    """
    out_streams = [
        Stream(func.__name__ + in_stream.name + str(index))
        for index in range(num_out_streams)
    ]
    # call_streams and name are not used by this functional wrapper.
    split_list(func, in_stream, out_streams, state, None, None, *args, **kwargs)
    return out_streams
def multi_element_f(func, in_streams, num_out_streams, state=None, *args, **kwargs):
    """Functional wrapper around multi_element(): build anonymous output
    streams, attach the agent, and return the output streams."""
    outputs = [Stream() for _ in range(num_out_streams)]
    # call_streams=None, name=None: this wrapper never uses them.
    multi_element(func, in_streams, outputs, state, None, None, *args, **kwargs)
    return outputs
def select_stream_from_entry(self):
    """
    Gets the values from the ui elements, and executes the program
    in json mode, to determine if the values are valid
    """
    text = self.url_ui.text()
    # split() on whitespace: the entry field may carry extra arguments.
    tokens = text.split()
    self.messages_ui.append('Trying to open stream: {}'.format(text))
    Stream(tokens).start(self.messages_ui)
def compute_func(in_streams, out_streams):
    """map_window example: sum non-overlapping windows of size 2 of the
    first input stream, check the result, and save it to a file."""
    expected = [1, 5, 9, 13, 17]
    windowed = Stream()
    map_window(func=sum, in_stream=in_streams[0], out_stream=windowed,
               window_size=2, step_size=2)
    check_correctness_of_output(in_stream=windowed, check_list=expected)
    stream_to_file(
        in_stream=windowed,
        filename='single_process_single_source_map_window_example_1.dat')
def compute_func(in_streams, out_streams):
    """filter_element example with state: the predicate compares each
    element to a moving threshold, checks the output, and saves it."""
    def below_threshold(value, threshold):
        # Returns (filter condition, next state): condition is
        # value <= threshold, and the threshold advances by 1 each step.
        return value <= threshold, threshold + 1
    expected = [1, 3, 5, 7, 9]
    filtered = Stream()
    filter_element(func=below_threshold, in_stream=in_streams[0],
                   out_stream=filtered, state=0)
    check_correctness_of_output(in_stream=filtered, check_list=expected)
    stream_to_file(in_stream=filtered, filename='filter_element_example_1.dat')
def __init__(self, dev=None, headset_id=None, rate=None):
    """Initialize the headset: open its serial stream, wait for the
    device to settle, then connect and start processing.

    dev        -- serial device identifier (passed to Stream)
    headset_id -- forwarded to Headset.__init__
    rate       -- serial rate; stored and passed to Stream
    """
    Headset.__init__(self, headset_id)
    self.device = dev
    # NOTE(review): attribute is spelled 'bauderate' (sic) in this code
    # base; kept as-is because external callers may rely on the name.
    self.bauderate = rate
    self.stream = Stream(device=self.device, bauderate=rate,
                         version=Version.MINDWAVE)
    # Give the device time to settle before attempting to connect.
    time.sleep(2)
    self.connect()
    self.run(self.stream)
def addToStream(self, point):
    """Record `point` on this reach: set the reach start on the second
    point seen, append a Stream for the point, and update the end."""
    # The first point of a reach is only marked in array_done; the
    # second point seen here becomes the reach's start.
    if self.start == (0, 0):
        self.start = point
    # Wrap the point in a Stream and add it to this reach's list.
    self.list_Stream.append(Stream(point))
    # The latest point is always the current end of the reach.
    self.end = point
    return
def multi_list_f(func, in_streams, num_out_streams, state=None, *args, **kwargs):
    """Create `num_out_streams` output streams (named func.__name__ + index),
    attach a multi_list agent fed by `in_streams`, and return the streams."""
    outputs = [Stream(func.__name__ + str(k)) for k in range(num_out_streams)]
    # call_streams and name are unused by this wrapper.
    multi_list(func, in_streams, outputs, state, None, None, *args, **kwargs)
    return outputs
def g_function():
    """Build a two-stage map_element pipeline t -> t1 -> u and return
    (sources, in_streams, out_streams) for the process harness.

    Returns:
        sources     -- empty list (this network has no sources)
        in_streams  -- [t]
        out_streams -- [u]
    Also registers the input streams with the scheduler by name.
    """
    from op import map_element
    t = Stream('t')
    u = Stream('u')
    t1 = Stream('t1')
    def g_print(y):
        return y * 2
    def gg_print(y):
        # Fixed: was a Python 2 print statement (`print '...', y`), a
        # syntax error under Python 3 and inconsistent with the rest of
        # this file which uses print().
        print('In g_function. gg_print() y is', y)
        return 100 * y
    map_element(func=g_print, in_stream=t, out_stream=t1, name='b')
    map_element(func=gg_print, in_stream=t1, out_stream=u, name='b1')
    sources = []
    in_streams = [t]
    out_streams = [u]
    name_to_stream = {s.name: s for s in in_streams}
    Stream.scheduler.name_to_stream = name_to_stream
    return sources, in_streams, out_streams
def compute_func(in_streams, out_streams):
    """Run a Misra-Gries heavy-hitters agent over the source stream and
    write its reports to `out_filename`."""
    # Internal stream: output by the misra_gries agent and input by the
    # stream_to_file agent.
    mg_reports = Stream('Misra Gries output')
    misra_gries(
        k=num_heavy_hitters,       # number of heavy hitters to track
        in_stream=in_streams[0],   # input from source
        out_stream=mg_reports,     # goes to the file writer
        M=reporting_window_size)
    stream_to_file(in_stream=mg_reports, filename=out_filename)
def test_heavy_hitters_stream():
    """Drive the heavy-hitters agent with a scripted command sequence and
    print the values it emits."""
    hh_object = HeavyHitters(width=1000, depth=5)
    x = Stream('input')
    y = ggg(x, heavy_hitters_object=hh_object)
    # NOTE: ('heavy_hitters') is a plain string, not a 1-tuple — it has
    # no trailing comma, exactly as in the original test data.
    commands = [
        ('add', 'a'), ('add', 'a'), ('add', 'a'), ('add', 'b'),
        ('heavy_hitters'),
        ('add', 'a'), ('add', 'b'), ('add', 'c'), ('add', 'a'),
        ('heavy_hitters'),
        ('add', 'b'), ('add', 'c'), ('add', 'b'), ('add', 'b'),
        ('heavy_hitters'),
    ]
    x.extend(commands)
    run()
    print(recent_values(y))
def compute_func(in_streams, out_streams):
    """map_list example: each delivered list is concatenated with itself
    (lst + lst); the result is checked and written to a file."""
    def concat_with_self(chunk):
        return chunk + chunk
    expected = [0, 0, 1, 1, 2, 2, 3, 3, 4, 4,
                5, 5, 6, 6, 7, 7, 8, 8, 9, 9]
    doubled = Stream()
    map_list(func=concat_with_self, in_stream=in_streams[0],
             out_stream=doubled)
    check_correctness_of_output(in_stream=doubled, check_list=expected)
    stream_to_file(
        in_stream=doubled,
        filename='single_process_single_source_map_list_example_4.dat')
def split_window_f(func, in_stream, num_out_streams, window_size, step_size,
                   state=None, *args, **kwargs):
    """Create `num_out_streams` named output streams, wire a split_window
    agent over `in_stream`, and return the streams.

    Fixes two defects in the original:
    * `num_out_streams` was used but never declared (NameError); it is now
      a parameter, placed after the input stream to match the sibling
      wrappers split_list_f / multi_list_f.
    * the function returned the undefined name `out_stream` instead of
      `out_streams`.
    """
    out_streams = [
        Stream(func.__name__ + in_stream.name + str(index))
        for index in range(num_out_streams)
    ]
    # Argument order preserved from the original call.
    # TODO(review): split_list_f passes (state, None, None) after the
    # streams; confirm against split_window's signature that
    # (None, None, state) is the intended order here.
    split_window(func, in_stream, out_streams, window_size, step_size,
                 None, None, state, *args, **kwargs)
    return out_streams
def test_timed_window():
    """timed_window groups (time, value) pairs into duration-10 windows;
    windows that receive no data produce no output."""
    scheduler = Stream.scheduler
    x = Stream('x')
    y = Stream('y')
    def identity(v):
        return v
    timed_window(func=identity, in_stream=x, out_stream=y,
                 window_duration=10, step_time=10)
    samples = [(1, 'a'), (8, 'b'), (12, 'c'), (14, 'd'), (32, 'e'), (50, 'f')]
    x.extend(samples)
    scheduler.step()
    # Window [20,30) is empty, so no (30, ...) entry appears.
    assert recent_values(y) == [
        (10, [(1, 'a'), (8, 'b')]),
        (20, [(12, 'c'), (14, 'd')]),
        (40, [(32, 'e')]),
    ]
    return
def simple_zip_map_test():
    """zip_map test: z[i] = 2 * (x[i] + y[i]), checked over three
    incremental extend/step rounds."""
    scheduler = Stream.scheduler
    x = Stream('x')
    y = Stream('y')
    z = Stream('z')
    # Encapsulated function: doubles the sum of the zipped elements.
    def double_sum(values):
        return 2 * sum(values)
    zip_map(func=double_sum, in_streams=[x, y], out_stream=z)
    # Round 1: z only advances as far as the shorter input.
    x.extend(list(range(4)))
    y.extend(list(range(10, 20, 2)))
    run()
    assert recent_values(z) == [20, 26, 32, 38]
    # Round 2: new data on both inputs.
    x.extend([82, 10])
    y.extend([-10, 200, 300])
    scheduler.step()
    assert recent_values(z) == [20, 26, 32, 38, 200, 0]
    # Round 3: x catches up with the pending y values.
    x.extend([-200, -300])
    scheduler.step()
    assert recent_values(z) == [20, 26, 32, 38, 200, 0, 0, 0]
def initialize_coefficient_stream(self, compute_coefficients):
    """
    Initializes the coefficient stream.

    INPUT: compute_coefficients

    TESTS::

        sage: from sage.combinat.species.series_order import inf, unk
        sage: L = LazyPowerSeriesRing(QQ)
        sage: f = L()
        sage: compute_coefficients = lambda ao: iter(ZZ)
        sage: f.order = inf
        sage: f.aorder = inf
        sage: f.initialize_coefficient_stream(compute_coefficients)
        sage: f.coefficients(5)
        [0, 0, 0, 0, 0]

    ::

        sage: f = L()
        sage: compute_coefficients = lambda ao: iter(ZZ)
        sage: f.order = 1
        sage: f.aorder = 1
        sage: f.initialize_coefficient_stream(compute_coefficients)
        sage: f.coefficients(5)
        [0, 1, -1, 2, -2]
    """
    # The approximate order must already be known (not `unk`).
    ao = self.aorder
    assert ao != unk
    if ao == inf:
        # Infinite approximate order: the series is zero.
        self.order = inf
        self._stream = Stream(0)
    else:
        # Otherwise stream the coefficients produced for this order.
        self._stream = Stream(compute_coefficients(ao))
    self.is_initialized = True
def test_kmeans_streams():
    """Feed six 2-D points into a streaming k-means (k = 2), then issue
    the 'cluster' and 'show' commands and print the agent's output."""
    s = Stream()
    t = Stream()
    km = kmeans_stream(n_clusters=2)

    @map_e
    def g(v):
        return km.process_element(v)

    g(in_stream=s, out_stream=t)
    points = [[1, 2], [1, 4], [1, 0], [10, 4], [10, 0], [10, 2]]
    for point in points:
        s.append(('add', point))
    s.append('cluster')
    s.append('show')
    run()
    print(recent_values(t))
def __init__(self, class_file, stream: Stream):
    """Parse a Code attribute from `stream`: stack/locals sizes, the
    bytecode (decoded into instructions), the exception table, and any
    nested attributes."""
    self.max_stack = stream.read_u2()
    self.max_locals = stream.read_u2()
    code_length = stream.read_u4()
    raw_code = stream.read_bytes(code_length)
    # Decode the bytecode via an in-memory sub-stream over the raw bytes.
    # noinspection PyTypeChecker
    self.instructions: List[Instruction] = self.read_bytecode(
        code_length, Stream(BytesIO(raw_code)))
    self.exception_table = [
        ExceptionTableEntry(stream) for _ in range(stream.read_u2())
    ]
    self.attributes = [
        Attribute.read(class_file, stream) for _ in range(stream.read_u2())
    ]
def f_function():
    """Create a source-driven pipeline s -> t (multiply by 10) and return
    (sources, in_streams, out_streams) for the process harness."""
    from source import source_function
    from op import map_element
    s = Stream('s')
    t = Stream('t')
    def times_ten(x):
        return x * 10
    def counter(state):
        # Emit state+1 and advance the state by 1.
        return state + 1, state + 1
    map_element(func=times_ten, in_stream=s, out_stream=t, name='aaaa')
    src = source_function(
        func=counter, stream_name='s', time_interval=0.1, num_steps=10,
        state=0, window_size=1, name='source')
    sources = [src]
    in_streams = [s]
    out_streams = [t]
    # Register the input streams with the scheduler by name.
    Stream.scheduler.name_to_stream = {stm.name: stm for stm in in_streams}
    return sources, in_streams, out_streams
def compute_func(in_streams, out_streams):
    """map_list example: keep only elements below 5, check the output
    against the filtered source list, and write it to a file.

    Fix: under Python 3, filter() returns a lazy iterator, so the
    original `return filter(h, lst)` produced a filter object instead of
    a list (and a single-use iterator for check_list). Wrapping in
    list() is a no-op on Python 2 and restores list semantics on 3.
    """
    def h(v):
        return v < 5
    def f(lst):
        return list(filter(h, lst))
    check_list = f(source_list)
    t = Stream()
    map_list(func=f, in_stream=in_streams[0], out_stream=t)
    check_correctness_of_output(in_stream=t, check_list=check_list)
    stream_to_file(
        in_stream=t,
        filename='single_process_single_source_map_list_example_3.dat')
def main():
    """Compare Misra-Gries frequency estimates against exact counts over
    a stream of one million values drawn from (0, 1000)."""
    exact_counter = {}
    mg = MisraGries(20)
    sample_count = 1000000
    source = Stream(sample_count, (0, 1000))
    for value in source:
        # Maintain the exact frequency table alongside the sketch.
        exact_counter[value] = exact_counter.get(value, 0) + 1
        mg.update(value)
    for key in exact_counter:
        print('Value {}, estimated freq = {}, real freq = {}'.format(
            key, mg.estimate_frequency(key), exact_counter[key]))
def main_loop(self):
    """Game loop: occasionally spawn a stream, handle input, advance the
    game logic, draw, and drop streams that are no longer visible."""
    while True:
        # Spawn a new stream with probability STREAM_GEN while the
        # population is under the cap.
        if len(self.streams) < STREAM_MAX and random.random() < STREAM_GEN:
            self.streams.append(
                Stream(self.screen, self.font, self.direction))
        self._handle_input()
        self._process_game_logic()
        self._draw()
        # Only keep the visible ones.
        self.streams = [s for s in self.streams if s.visible]
def add_source(self, name, stream=None):
    ''' Add source by name (with optional log stream). '''
    module = importlib.import_module('sources.' + name)
    # If no log stream is supplied, create one under logs/<root>.txt,
    # where <root> is the top-level module name (logs are shared by the
    # root module of a dotted source name).
    if stream is None:
        os.makedirs('logs', exist_ok=True)
        root = name.split('.')[0] if '.' in name else name
        stream = Stream(out=self.out, log=os.path.join('logs', root + '.txt'))
    source = module.Source(stream)
    self._sources.append(source)
    return source
def generate_from_data(data_id):
    """Load a problem instance from data/original_problems/<data_id>.dat
    and build a Min_Utility_Problem.

    File layout (whitespace-separated): line index 3, column 1 holds
    DTmin; rows from index 4 on describe streams and utilities. Rows
    whose tag's second character is 'U' are utilities; the rest are
    process streams (columns: tag, Tin, Tout, FCp/cost).

    Fix: the file is now opened with a context manager so the handle is
    closed even if a parse error raises mid-way (the original
    open()/close() pair leaked the handle on exceptions).
    """
    path = 'data/original_problems/' + data_id + '.dat'
    with open(path, 'r') as f:
        lines = f.readlines()
    elements = [line.split() for line in lines]
    DTmin = float(elements[3][1])
    elements = elements[4:]
    streams = [Stream(Tin=float(e[1]), Tout=float(e[2]), FCp=float(e[3]))
               for e in elements if e[0][1] != 'U']
    utilities = [Utility(Tin=float(e[1]), Tout=float(e[2]), cost=float(e[3]))
                 for e in elements if e[0][1] == 'U']
    return Min_Utility_Problem(streams, utilities, DTmin)
def compute_func(in_streams, out_streams):
    """Composed agent with two input streams and no output stream.

    The first component zips the two input streams onto an internal
    stream; the second writes that stream to 'output.dat'.
    """
    from sink import stream_to_file
    # Internal stream connecting the zip agent to the file sink.
    zipped = Stream()
    zip_stream(in_streams=in_streams, out_stream=zipped)
    stream_to_file(in_stream=zipped, filename='output.dat')
def test_sink():
    """Test sink_e with state: element v appends v+state to the output
    list, and the state grows by `addend` for every element."""
    collected = []

    @sink_e
    def accumulate(v, state, addend, output_list):
        output_list.append(v + state)
        # Next state: previous state advanced by the fixed addend.
        return state + addend

    src = Stream()
    accumulate(src, state=0, addend=10, output_list=collected)
    src.extend(list(range(5)))
    run()
    assert collected == [0, 11, 22, 33, 44]
def __init__(self):
    """Set up pygame, the audio stream, and all drawing surfaces,
    controls, and smoothing-history buffers for the visualizer."""
    pygame.init()
    self.outputs = Outputs()
    # Mono stream: 60 kHz sample rate, 2048-sample buffer.
    self.stream = Stream(channels=1, sample_rate=60 * 10**3, sample_size=2**11)
    self.mouse_frequency = 0.0

    # visual params
    self.background_color = pygame.Color(50, 50, 50)
    self.colorA = pygame.Color("#ff0000")
    self.colorB = pygame.Color("#0000ff")
    self.num_bars = self.outputs.get_divisor()

    # surface params
    self.height = 1000
    self.dimensions = numpy.array([self.outputs.get_width(), self.height])
    self.surface_flags = pygame.HWSURFACE | pygame.DOUBLEBUF
    self.surface = pygame.display.set_mode(self.dimensions, self.surface_flags)
    # Time- and frequency-domain surfaces each take half the height.
    self.time_surface = pygame.Surface(self.dimensions // numpy.array([1, 2]))
    self.freq_surface = pygame.Surface(self.dimensions // numpy.array([1, 2]))
    # Control overlay: quarter-size, background color keyed out.
    self.control_surface = pygame.Surface(self.dimensions // 2)
    self.control_surface.set_colorkey(self.background_color)

    self.controls = Controls(self.control_surface)

    self.sliders = {
        'pull': Slider(self.control_surface,
                       pygame.Rect(300, 46, 100, 10), 10, 15, value=0.5),
        'smooth': Slider(self.control_surface,
                         pygame.Rect(300, 66, 100, 10), 10, 15, value=0.5)
    }

    # smoothing history arrays (one slot per bar)
    self.t_history = numpy.full(self.num_bars, 0.5)
    self.f_history = numpy.full(self.num_bars, 0.0)
def match(self, token_stream):
    """Consume an alternating element/delimiter sequence from token_stream.

    Repeatedly: peek one token; if self.filter_func accepts it, collect
    it and advance, otherwise stop. After each accepted element, peek
    again; if the token equals self.delimiter, consume it and continue,
    otherwise stop. Returns a Match whose groups hold the accepted
    elements (tokens are only consumed on acceptance, so a rejected
    token is left in the stream).
    """
    groups = Stream()
    while True:
        e = token_stream.peek()
        if self.filter_func(e):
            groups.append(e)
            next(token_stream)
        else:
            # Element rejected: leave it unconsumed and stop.
            break
        d = token_stream.peek()
        if d == self.delimiter:
            next(token_stream)
        else:
            # No trailing delimiter: the sequence ends here.
            break
    return Match(True, groups=groups)
def select_stream_from_link(self, tableWidgetItem):
    """Open the stream whose URL sits in the first column of the row the
    user activated."""
    row = tableWidgetItem.row()
    # The url is in the first column of the clicked row.
    url = self.links_ui.item(row, 0).text()
    self.messages_ui.append('Trying to open stream: {}'.format(url))
    Stream(url.split()).start(self.messages_ui)
def pick_orientation(scaled, timestamps, orientation): """ Sends picks on a single orientation, either 'n', 'e', or 'z'. """ # --------------------------------------------------------------- # CREATE AGENTS AND STREAMS # --------------------------------------------------------------- # 1. DECIMATE SCALED DATA. # Window of size DECIMATION is decimated to its average. decimated = Stream('decimated') map_window(lambda v: sum(v) / float(len(v)), scaled, decimated, window_size=self.decimation, step_size=self.decimation) # 2. DECIMATE TIMESTAMPS. # Window of size DECIMATION is decimated to its last value. decimated_timestamps = Stream('decimated_timestamps') map_window(lambda window: window[-1], timestamps, decimated_timestamps, window_size=self.decimation, step_size=self.decimation) # 3. DEMEAN (subtract mean from) DECIMATED STREAM. # Subtract mean of window from the window's last value. # Move sliding window forward by 1 step. demeaned = Stream('demeaned', initial_value=[0.0] * (LTA_count - 1)) map_window(lambda window: window[-1] - sum(window) / float(len(window)), decimated, demeaned, window_size=LTA_count, step_size=1) # 4. MERGE TIMESTAMPS WITH DEMEANED ACCELERATIONS. # Merges decimated_timestamps and demeaned to get timestamped_data. timestamped_data = Stream('timestamped_data') zip_streams(in_streams=[decimated_timestamps, demeaned], out_stream=timestamped_data) # 5. DETECT PICKS. # Output a pick if the value part of the time_value (t_v) exceeds threshold. picks = Stream('picks') filter_element(lambda t_v: abs(t_v[1]) > self.pick_threshold, timestamped_data, picks) # 6. QUENCH PICKS. # An element is a (timestamp, value). # Start a new quench when timestamp > QUENCH_PERIOD + last_quench. # Update the last quench when a new quench is initiated. # Initially the last_quench (i.e. state) is 0. 
quenched_picks = Stream('quenched_picks') # f is the filtering function def f(timestamped_value, last_quench, QUENCH_PERIOD): timestamp, value = timestamped_value new_quench = timestamp > QUENCH_PERIOD + last_quench last_quench = timestamp if new_quench else last_quench # return filter condition (new_quench) and next state (last_quench) return new_quench, last_quench filter_element(f, picks, quenched_picks, state=0, QUENCH_PERIOD=2) # 7. SEND QUENCHED PICKS. self.send_event(quenched_picks)