def _on_accept(self):
    """Accept-loop callback for a listening socket.

    Accepts as many pending connections as possible.  For each accepted
    socket a new ``Stream`` is created and handed to ``self._onAccepted``.
    Returns when the accept queue is drained (EAGAIN/EINPROGRESS) or on a
    fatal accept error, in which case the listening fd is closed and
    ``self._onClosed`` is invoked.
    """
    _logger.debug('_on_accept')
    while True:
        try:
            sock, addr = self._fd.accept()
            _logger.debug('fd: %d accept fd: %d', self._fd.fileno(), sock.fileno())
        except socket.error as msg:
            # A connection aborted before accept() completed is harmless;
            # just try the next pending connection.
            if msg.errno == errno.ECONNABORTED:
                continue
            if msg.errno != errno.EAGAIN and msg.errno != errno.EINPROGRESS:
                # Fatal accept error: close the listener and notify.
                _logger.error('fd: %d, accept: %s', self._fd.fileno(), os.strerror(msg.errno))
                self._fd.close()
                if self._onClosed is not None:
                    try:
                        self._onClosed(self)
                    except Exception as ex:
                        # A failing user callback must not break the caller.
                        _logger.error('_onClosed: %s', str(ex))
                        _logger.exception(traceback.format_exc())
            # EAGAIN/EINPROGRESS: accept queue drained — wait for the next
            # readiness event.  (Fatal errors return here too.)
            return
        else:
            # Wrap the accepted socket; it is already connected.
            new_stream = Stream(sock, prefix=self._prefix)
            new_stream._connected = True
            try:
                self._onAccepted(new_stream, addr)
            except Exception as e:
                # If the user callback fails, drop the new connection.
                _logger.error('_onAccepted: %s', e)
                _logger.exception(traceback.format_exc())
                new_stream.close()
class Filter(MiniEngine):
    """Mini-engine that forwards only records satisfying a predicate.

    Stop words always pass through so partition boundaries are preserved.
    """

    def __init__(self, input, predicate):
        MiniEngine.__init__(self)
        self._input = input
        self._predicate = predicate
        # The predicate must understand the input schema.
        assert self._predicate.accepts(self._input.schema())
        self._input_ep = self._input.connect()
        # Filtering removes records but never reorders them, so the input
        # sort order carries over.
        self._output = Stream(self._input.schema(),
                              self._input.sort_order(),
                              'Filter')

    def output(self):
        """Return the filtered output stream."""
        return self._output

    def run(self):
        """Consume the input endpoint until the stream is closed."""
        done = False
        while not done:
            try:
                record = self._input_ep.receive()
                # Stop words pass unconditionally; data records must
                # satisfy the predicate.
                if type(record) is StopWord or self._predicate(record):
                    self._output.send(record)
                self._input_ep.processed()
            except StreamClosedException:
                done = True
        print('Closing FILTER stream')
        self._output.close()
class ArrayStreamer(MiniEngine):
    """Mini-engine that emits the records of an in-memory sequence."""

    def __init__(self, schema, data, sort_order=None):
        MiniEngine.__init__(self)
        self._schema = schema
        self._data = data
        # Fall back to an empty sort order when none (or a falsy one)
        # was supplied.
        self._output_stream = Stream(self._schema,
                                     sort_order or SortOrder(),
                                     'ARRAY STREAMER')

    def output(self):
        """Return the stream fed by this streamer."""
        return self._output_stream

    def run(self):
        """Send every record of the backing sequence, then close."""
        for record in self._data:
            self._output_stream.send(record)
        print('Closing ARRAY stream')
        self._output_stream.close()
class Select(MiniEngine):
    """Mini-engine that applies a transformer to every data record."""

    def __init__(self, input, transformer):
        MiniEngine.__init__(self)
        self._input = input
        self._input_ep = input.connect()
        self._t = transformer
        # The transformer must accept records of the input schema.
        assert self._t.accepts(input.schema())
        # The transformation is a black box: it may invert, rename or
        # otherwise rewrite attribute values, so no sort order can be
        # carried over to the output stream.
        self._output_stream = Stream(self._t.schema(), SortOrder(), 'SELECT')

    def output(self):
        """Return the transformed output stream."""
        return self._output_stream

    def run(self):
        """Forward stop words untouched; transform all other records."""
        done = False
        while not done:
            try:
                record = self._input_ep.receive()
                if type(record) is StopWord:
                    self._output_stream.send(record)
                else:
                    self._output_stream.send(self._t(record))
                self._input_ep.processed()
            except StreamClosedException:
                done = True
        self._output_stream.close()
        print('Closing SELECT stream')
class Aggregate(MiniEngine):
    """Mini-engine folding each stop-word partition into one record."""

    def __init__(self, input, aggregator):
        MiniEngine.__init__(self)
        self._input = input
        self._input_ep = self._input.connect()
        self._a = aggregator
        # The aggregator must understand the input schema.
        assert self._a.accepts(self._input.schema())
        self._output = Stream(self._input.schema(),
                              self._input.sort_order(),
                              'Aggregate')

    def output(self):
        """Return the aggregated output stream."""
        return self._output

    def run(self):
        """Aggregate records partition by partition."""
        done = False
        # Start with a fresh aggregate.
        self._a.init()
        while not done:
            try:
                record = self._input_ep.receive()
                if type(record) is not StopWord:
                    # Fold the record into the running aggregate.
                    self._a(record)
                else:
                    # Partition boundary: emit the aggregate (if anything
                    # was aggregated), forward the stop word and reset.
                    if self._a.count():
                        self._output.send(self._a.record())
                    self._output.send(record)
                    self._a.init()
                self._input_ep.processed()
            except StreamClosedException:
                done = True
        print('Closing AGGREGATE stream')
        self._output.close()
class ArrayStreamer(MiniEngine):
    """Feeds the records of a Python sequence into a stream."""

    def __init__(self, schema, data, sort_order=None):
        MiniEngine.__init__(self)
        self._schema = schema
        self._data = data
        order = sort_order or SortOrder()  # default: unsorted
        self._output_stream = Stream(self._schema, order, 'ARRAY STREAMER')

    def output(self):
        """The stream this engine writes to."""
        return self._output_stream

    def run(self):
        """Emit all records, then close the output stream."""
        send = self._output_stream.send
        for item in self._data:
            send(item)
        print('Closing ARRAY stream')
        self._output_stream.close()
class Select(MiniEngine):
    """Applies a record transformer to a stream, record by record."""

    def __init__(self, input, transformer):
        MiniEngine.__init__(self)
        self._input = input
        self._input_ep = input.connect()
        self._t = transformer
        # The transformer must be able to process the input records.
        assert self._t.accepts(input.schema())
        # Since the transformation is opaque, the output sort order
        # cannot be inferred from the input and is left empty.
        self._output_stream = Stream(self._t.schema(), SortOrder(), 'SELECT')

    def output(self):
        """Return the stream of transformed records."""
        return self._output_stream

    def run(self):
        """Pump records from input to output until the input closes."""
        finished = False
        while not finished:
            try:
                rec = self._input_ep.receive()
                # Stop words are forwarded verbatim, everything else is
                # run through the transformer.
                out = rec if type(rec) is StopWord else self._t(rec)
                self._output_stream.send(out)
                self._input_ep.processed()
            except StreamClosedException:
                finished = True
        self._output_stream.close()
        print('Closing SELECT stream')
class Aggregate(MiniEngine):
    """Reduces every stop-word-delimited partition to a single record."""

    def __init__(self, input, aggregator):
        MiniEngine.__init__(self)
        self._input = input
        self._input_ep = self._input.connect()
        self._a = aggregator
        # Aggregator and input stream must agree on the schema.
        assert self._a.accepts(self._input.schema())
        self._output = Stream(self._input.schema(),
                              self._input.sort_order(),
                              'Aggregate')

    def output(self):
        """Return the stream of aggregate records."""
        return self._output

    def run(self):
        """Main loop: fold records, flush on each stop word."""
        self._a.init()
        running = True
        while running:
            try:
                rec = self._input_ep.receive()
                if type(rec) is StopWord:
                    # Flush: emit the aggregate only if it is non-empty,
                    # then pass the stop word on and start over.
                    if self._a.count():
                        self._output.send(self._a.record())
                    self._output.send(rec)
                    self._a.init()
                else:
                    self._a(rec)
                self._input_ep.processed()
            except StreamClosedException:
                running = False
        print('Closing AGGREGATE stream')
        self._output.close()
class Component():
    """Proxy for a MORSE simulation component.

    Depending on ``stream`` it exposes the component's data stream
    ('IN' → publish; 'OUT' → get/last/subscribe/unsubscribe) and one
    generated proxy method per RPC service name in ``services``.
    """

    def __init__(self, morse, name, fqn, stream=None, port=None, services=None):
        # BUG FIX: 'services' used to default to a shared mutable list
        # ([]); use None and normalize below so instances cannot leak
        # state into each other.
        self._morse = morse
        self.name = name
        self.fqn = fqn  # fully qualified name

        if stream == 'IN':
            self.stream = Stream(self._morse.com, port)
            self.publish = self.stream.publish
        elif stream == 'OUT':
            self.stream = Stream(self._morse.com, port)
            self.get = self.stream.get
            self.last = self.stream.last
            self.subscribe = self.stream.subscribe
            self.unsubscribe = self.stream.unsubscribe
        else:
            # Component without a data stream (services only).
            self.stream = None

        for s in (services or []):
            pymorselogger.debug("Adding service %s to component %s" % (s, self.name))
            self._add_service(s)

    def _add_service(self, m):
        """Attach a proxy method named *m* that invokes the remote service.

        The proxy submits the RPC asynchronously and returns a future.
        """
        def innermethod(*args):
            pymorselogger.debug("Sending asynchronous request %s with args %s." % (m, args))
            req = self._morse._make_request(self.fqn, m, *args)
            future = self._morse.executor.submit(self._morse._execute_rpc, req)
            # TODO: find a way to throw an exception in the main thread
            # if the RPC request fails at invocation for stupid reasons
            # like wrong # of params
            return future

        innermethod.__doc__ = "This method is a proxy for the MORSE %s service." % m
        innermethod.__name__ = str(m)
        setattr(self, innermethod.__name__, innermethod)

    def close(self):
        """Close the underlying data stream, if the component has one."""
        if self.stream:
            self.stream.close()
def waitForOthers(self):
    """Wait in the lobby until the game starts.

    Subscribes to the room's player list; the host additionally waits for
    local confirmation to start, while non-hosts wait for the remote
    start signal.
    """
    recentlyJoined = True

    def putFunc(data):
        # Callback invoked with batches of player names from Firebase.
        nonlocal recentlyJoined
        for p in data:
            success = self.engine.addPlayer(p)
            if recentlyJoined and success:
                # First batch after joining: these players were already here.
                print(f"{p} is already in the room")
            elif success:
                print(f"{p} joined the room")
        # NOTE(review): cleared after the first batch so later batches are
        # reported as fresh joins — confirm this placement against the
        # original layout (the flattened source is ambiguous here).
        recentlyJoined = False

    playerStream = Stream(putFunc)
    firebaseutils.listenToPlayers(playerStream, self.roomKey)
    if self.localPlayer.isHost:
        # The host decides when to start; block until they do.
        self.waitForStart()
        playerStream.close()
        self.engine.startGame()
        self.startGame()
    else:
        def shouldStartGame(stillWaiting):
            # Callback for the remote start flag; False means "started".
            if not stillWaiting:
                clearScreen()
                print("The host has started the game.")
                startStream.close()
                playerStream.close()
                try:
                    self.loadGame()
                except firebaseutils.FirebaseError:
                    self.exitWithError()

        startStream = Stream(shouldStartGame)
        firebaseutils.listenForStart(startStream, self.roomKey)
class Filter(MiniEngine):
    """Drops records that fail a predicate; stop words always pass."""

    def __init__(self, input, predicate):
        MiniEngine.__init__(self)
        self._input = input
        self._predicate = predicate
        # Predicate and input stream must agree on the schema.
        assert self._predicate.accepts(self._input.schema())
        self._input_ep = self._input.connect()
        # Filtering preserves relative order, so the sort order is kept.
        self._output = Stream(self._input.schema(),
                              self._input.sort_order(),
                              'Filter')

    def output(self):
        """Return the filtered stream."""
        return self._output

    def run(self):
        """Main loop: forward matching records until the input closes."""
        active = True
        while active:
            try:
                rec = self._input_ep.receive()
                keep = type(rec) is StopWord
                if not keep:
                    # Only consult the predicate for real data records.
                    keep = self._predicate(rec)
                if keep:
                    self._output.send(rec)
                self._input_ep.processed()
            except StreamClosedException:
                active = False
        print('Closing FILTER stream')
        self._output.close()
class Waves(object):
    """Pygame audio visualizer: time-domain bars (top half) and
    FFT/frequency bars (bottom half), with slider-controlled smoothing
    and frequency-axis warping ('pull')."""

    def __init__(self):
        pygame.init()
        self.outputs = Outputs()
        # Audio capture: mono, 60 kHz, 2048-sample chunks.
        self.stream = Stream(channels=1, sample_rate=60 * 10**3, sample_size=2**11)
        self.mouse_frequency = 0.0

        # visual params
        self.background_color = pygame.Color(50, 50, 50)
        self.colorA = pygame.Color("#ff0000")
        self.colorB = pygame.Color("#0000ff")
        self.num_bars = self.outputs.get_divisor()

        # surface params; the window is split horizontally into a
        # time-domain half and a frequency-domain half.
        self.height = 1000
        self.dimensions = numpy.array([self.outputs.get_width(), self.height])
        self.surface_flags = pygame.HWSURFACE | pygame.DOUBLEBUF
        self.surface = pygame.display.set_mode(self.dimensions, self.surface_flags)
        self.time_surface = pygame.Surface(self.dimensions // numpy.array([1, 2]))
        self.freq_surface = pygame.Surface(self.dimensions // numpy.array([1, 2]))
        self.control_surface = pygame.Surface(self.dimensions // 2)
        # Background color is the colorkey so the control overlay is
        # transparent where nothing is drawn.
        self.control_surface.set_colorkey(self.background_color)
        self.controls = Controls(self.control_surface)
        self.sliders = {
            'pull': Slider(self.control_surface, pygame.Rect(300, 46, 100, 10), 10, 15, value=0.5),
            'smooth': Slider(self.control_surface, pygame.Rect(300, 66, 100, 10), 10, 15, value=0.5)
        }

        # smoothing history array (exponential smoothing state per bar)
        self.t_history = numpy.full(self.num_bars, 0.5)
        self.f_history = numpy.full(self.num_bars, 0.0)

    def get_samples(self):
        """Read one chunk and return normalized samples as a list.

        NOTE(review): 'format' shadows the builtin; '<{}h' assumes
        little-endian signed 16-bit samples — confirm against Stream.
        """
        format = '<{}h'.format(self.stream.sample_size)
        byte_string = self.stream.read(self.stream.sample_size)
        return list(map(util.normalize, struct.unpack(format, byte_string)))

    def draw_time_bars(self, samples, surface):
        """Draw one smoothed bar per output column from raw samples."""
        width, height = surface.get_size()
        bar_width = width / self.num_bars
        s = self.sliders['smooth'].value
        for i in range(self.num_bars):
            power_i = samples[i]
            # Exponential smoothing against the previous frame's value.
            power_s = self.t_history[i] * s + power_i * (1 - s)
            power = self.t_history[i] = power_s
            bar_height = power * height
            top = height - bar_height
            left = i * bar_width
            # NOTE(review): bar height hard-coded to 5 px; 'bar_height'
            # was left commented out in the original.
            rect = (left, top, bar_width, 5)  # bar_height)
            color = util.gradient(power, self.colorA, self.colorB)
            pygame.draw.rect(surface, color, rect)

    def draw_freq_bars(self, samples, surface):
        """Draw log-scaled FFT magnitude bars with the 'pull' warp."""
        width, height = surface.get_size()
        y_max = self.stream.sample_size // 2
        bar_width = width / self.num_bars
        # Log-compress FFT magnitudes, normalized by log(y_max).
        yf = numpy.log(numpy.abs(numpy.fft.fft(samples)) + 1) / numpy.log(y_max)
        s = self.sliders['smooth'].value
        pull = 1 - self.sliders['pull'].value
        # 'g' controls how strongly low frequencies are stretched across
        # the bars (see util.shift_inverse*).
        g = (self.num_bars - 1) * (self.stream.sample_size // 2 - 1) * pull
        v, h = util.shift_inverse_consts(0, 1, self.num_bars - 1, self.stream.sample_size // 2 - 1, g)
        for x in range(self.num_bars):
            # Map bar index -> FFT bin through the warp function.
            y = util.shift_inverse(x, g, v, h)
            power_i = yf[int(y)]
            power_s = self.f_history[x] * s + power_i * (1 - s)
            power = self.f_history[x] = power_s
            if power > 1.0:
                power = 1.0  # clamp; log scaling can overshoot
            bar_height = power * height
            top = height - bar_height
            left = x * bar_width
            rect = (left, top, bar_width, bar_height)
            color = util.gradient(power, self.colorA, self.colorB)
            pygame.draw.rect(surface, color, rect)

    def resize_bars(self):
        """Re-read the bar count and resize the smoothing state arrays."""
        self.num_bars = self.outputs.get_divisor()
        self.t_history.resize(self.num_bars)
        self.f_history.resize(self.num_bars)

    def resize(self):
        """Rebuild all surfaces after a width/height change."""
        width = self.outputs.get_width()
        height = self.height
        self.time_surface = pygame.Surface((width, height // 2))
        self.freq_surface = pygame.Surface((width, height // 2))
        self.surface = pygame.display.set_mode((width, height), self.surface_flags)
        self.resize_bars()

    def process_key(self, key):
        """Keyboard controls: b=bars, h=height, n=chunk size, r=rate,
        w=width; shift inverts/increases each adjustment."""
        HEIGHT_DELTA = 100
        HEIGHT_MIN = 300
        SIZE_MIN = 1
        RATE_DELTA = 1000
        RATE_MIN = 0
        mods = pygame.key.get_mods()
        shift = mods & pygame.KMOD_SHIFT
        if key == ord('b'):
            if shift:
                self.outputs.next_divisor()
            else:
                self.outputs.prev_divisor()
            self.resize_bars()
        if key == ord('h'):
            if shift:
                self.height += HEIGHT_DELTA
            elif self.height > HEIGHT_MIN:
                self.height -= HEIGHT_DELTA
            self.resize()
        if key == ord('n'):
            # NOTE(review): halving multiplies by 0.5, which turns
            # sample_size into a float — confirm Stream tolerates that.
            k = 2 if shift else 0.5
            if self.stream.sample_size > SIZE_MIN:
                self.stream.sample_size *= k
        if key == ord('r'):
            k = 1 if shift else -1
            if self.stream.sample_rate > RATE_MIN:
                self.stream.sample_rate += k * RATE_DELTA
        if key == ord('w'):
            if shift:
                self.outputs.next_width()
            else:
                self.outputs.prev_width()
            self.resize()

    def process_events(self):
        """Drain the pygame event queue: quit, keys, mouse, sliders."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self.exit()
            if event.type == pygame.KEYDOWN:
                self.process_key(event.key)
            if event.type == pygame.MOUSEMOTION:
                x = event.pos[0]
                R = self.stream.sample_rate
                N = self.stream.sample_size
                pull = 1 - self.sliders['pull'].value
                g = (self.num_bars - 1) * (N // 2 - 1) * pull
                v, h = util.shift_inverse_consts(0, 1, self.num_bars - 1, N // 2 - 1, g)
                bar_width = self.outputs.get_width() / self.num_bars
                bar_index = math.floor(x / bar_width)
                # Invert the bar->bin warp to show the frequency under
                # the mouse cursor.
                self.mouse_frequency = util.shift_inverse(
                    bar_index, g, v, h) * (R / 2) / (N / 2 - 1)
                for slider in self.sliders.values():
                    if slider.moving:
                        slider.set_value(x)
            if event.type == pygame.MOUSEBUTTONDOWN:
                for slider in self.sliders.values():
                    if slider.get_handle_rect().collidepoint(event.pos):
                        slider.moving = True
            if event.type == pygame.MOUSEBUTTONUP:
                for slider in self.sliders.values():
                    slider.moving = False

    def exit(self):
        """Close the audio stream and shut pygame down, then exit."""
        self.stream.close()
        pygame.display.quit()
        pygame.quit()
        sys.exit(0)

    def loop(self):
        """One frame: handle events, read audio, draw all surfaces."""
        self.process_events()
        surfaces = [self.time_surface, self.freq_surface, self.control_surface]
        for surface in surfaces:
            surface.fill(self.background_color)
        samples = self.get_samples()
        self.controls.draw(self.stream.sample_rate, self.stream.sample_size,
                           self.sliders['pull'].value, self.sliders['smooth'].value,
                           self.outputs.get_width(), self.num_bars, self.mouse_frequency)
        for slider in self.sliders.values():
            slider.draw()
        self.draw_time_bars(samples, self.time_surface)
        self.draw_freq_bars(samples, self.freq_surface)
        self.surface.blit(self.time_surface, (0, 0))
        self.surface.blit(self.freq_surface, (0, self.height // 2))
        self.surface.blit(self.control_surface, (0, 0))
        pygame.display.flip()
class Mux(MiniEngine):
    """Multiplexes several input streams into one output stream.

    All input streams must share the same schema.  Records are forwarded
    in the order in which endpoints announce availability on the shared
    notification queue, so no global record order is guaranteed.
    """

    def __init__(self, *streams):
        MiniEngine.__init__(self)
        if not streams:
            raise Exception('Mux: must specify at least one stream.')
        self._streams = streams
        # Endpoints enqueue themselves here when data becomes available.
        self._queue = Queue(100)
        self._stats = {}
        self._endpoints = dict([(s.connect(), s) for s in self._streams])
        for e in self._endpoints:
            self._stats[e] = 0
            e.notify(self._queue)
        # Every stream must match the schema of the first one.
        for s in self._streams:
            if s.schema() != self._streams[0].schema():
                raise Exception('Mux: schema of streams must match.')
        # Merge order is nondeterministic, hence an empty sort order.
        self._output = Stream(self._streams[0].schema(), SortOrder(), 'Mux Output')

    def output(self):
        """Return the multiplexed output stream."""
        return self._output

    def run(self):
        """Drain notified endpoints until all input streams have closed."""
        # Keep going while any endpoint is open or notifications remain.
        while self._endpoints or not self._queue.empty():
            e = self._queue.get()
            if e not in self._endpoints:
                print '\t********* got non-existing endpoint'
                continue
            valid = True
            closed = False
            # Drain this endpoint without blocking until it is either
            # exhausted or its stream is closed.
            while valid and not closed:
                try:
                    r = e.receive(False)
                    self._stats[e] += 1
                    self._output.send(r)
                    e.processed()
                except StreamClosedException:
                    closed = True
                except:
                    # NOTE(review): bare except — presumably meant to catch
                    # the queue-empty case of the non-blocking receive, but
                    # it hides any other failure too.  Confirm and narrow.
                    valid = False
            else:
                # No break in the loop above, so this always runs: drop
                # endpoints whose stream has closed.
                if e in self._endpoints:
                    if closed:
                        del self._endpoints[e]
                else:
                    pass
            self._queue.task_done()
        self._output.close()
        # for e in self._stats:
        #     print 'Received %d records from %s' % (self._stats[e], e)
        print 'Mux: done.'
class Sort(MiniEngine):
    """Mini-engine that sorts records by a list of attributes.

    ``sort_attributes`` is a list of ``(name, comparison_function)``
    pairs; a ``None`` comparison function means the attribute type's own
    ``__cmp__`` is used.  With ``all=False`` every stop-word partition is
    sorted independently; with ``all=True`` the whole stream is buffered
    and sorted once, terminated by a single stop word.
    """

    def __init__(self, input_stream, sort_attributes, all=False):
        MiniEngine.__init__(self)
        self._input_stream = input_stream
        self._input_ep = input_stream.connect()
        self._schema = self._input_stream.schema()
        self._all = all
        self._indices = []
        # Passed as attributes = [('name', comparison_function), ...]
        for a in sort_attributes:
            # Raises if the attribute does not exist in the schema.
            i = self._schema.index(a[0])
            t = self._schema[i].type()
            if a[1]:
                # An explicit comparison function takes precedence.
                self._indices.append((i, a[1]))
            elif hasattr(t, '__cmp__'):
                # Otherwise fall back to the type's own comparison.
                self._indices.append((i, None))
            else:
                # BUG FIX: '%' previously bound only to the second string
                # literal ('+' has lower precedence), so raising this
                # exception itself raised a formatting error.
                raise Exception('Type of attribute [%s] does not have '
                                'a comparison operator.' % (a,))
        # Sorting by arbitrary comparators: output order is not declared.
        self._output_stream = Stream(self._schema, SortOrder(), 'SORT')

    def output(self):
        """Return the sorted output stream."""
        return self._output_stream

    def _compare(self, a, b):
        """Compare records attribute by attribute; first non-zero wins."""
        for index, comparator in self._indices:
            if comparator:
                x = comparator(a[index], b[index])
            else:
                x = cmp(a[index], b[index])
            if x != 0:
                return x
        return 0

    def run(self):
        """Buffer, sort and emit records per partition (or whole stream)."""
        closed = False
        # Renamed from 'set', which shadowed the builtin.
        buffered = []
        while not closed:
            try:
                r = self._input_ep.receive()
                if type(r) is StopWord:
                    if not self._all:
                        # Sort and flush the current partition, then
                        # forward its stop word.
                        buffered.sort(self._compare)
                        for x in buffered:
                            self._output_stream.send(x)
                        buffered = []
                        self._output_stream.send(r)
                else:
                    buffered.append(r)
                self._input_ep.processed()
            except StreamClosedException:
                closed = True
        if self._all:
            # Sort the entire stream at once and terminate it with a
            # single stop word.
            buffered.sort(self._compare)
            for x in buffered:
                self._output_stream.send(x)
            self._output_stream.send(StopWord())
        print('Closing SORT stream')
        self._output_stream.close()
class Join(MiniEngine):
    '''
    The Join mini-engine combines the records of two streams in such a
    way that the result is the Cartesian product between two
    corresponding record partitions, one from each stream.
    '''

    class PartitionBuffer(object):
        '''
        This class represents a partition buffer for a given endpoint.
        Each partition received from the endpoint is stored in a
        separate buffer.
        '''
        def __init__(self):
            self._b = []

        def append(self, r):
            # Lazily create the first partition buffer.
            if len(self._b) == 0:
                self._b.append([])
            self._b[-1].append(r)

        def current(self):
            # Index of the partition currently being filled.
            return len(self._b) - 1

        def next(self):
            # Start a new partition.
            self._b.append([])

        def get(self, i):
            assert i >= 0 and i < len(self._b)
            return self._b[i]

        def finished(self, i):
            # Partition i is complete once a later partition exists.
            return i < len(self._b) - 1

        def remove(self, i):
            # Release the storage of a merged partition (slot stays).
            assert i >= 0 and i < len(self._b)
            t = self._b[i]
            self._b[i] = None
            del t

    def __init__(self, first, second):
        MiniEngine.__init__(self)
        self._first = first
        self._second = second
        # Construct the schema of the output stream: concatenation of
        # both input schemas.
        self._schema = Schema()
        for a in self._first.schema() + self._second.schema():
            self._schema.append(a)
        self._queue = Queue(100)
        self._first_ep = self._first.connect()
        self._first_ep.notify(self._queue)
        self._second_ep = self._second.connect()
        self._second_ep.notify(self._queue)
        self._output = Stream(self._schema, SortOrder(), 'Join')
        self._m = {
            self._first_ep: self._first,
            self._second_ep: self._second,
        }
        # Counts partitions whose second side contained no data records.
        self._empty = 0

    def output(self):
        '''Returns the joined output stream.'''
        return self._output

    def _merge(self, buffers, i):
        """Yield the Cartesian product of partition i of both inputs.

        The trailing stop word of each partition is excluded via [:-1];
        the partitions' storage is released afterwards.
        """
        assert buffers[self._first_ep].finished(i)
        assert buffers[self._second_ep].finished(i)
        b1 = buffers[self._first_ep].get(i)
        b2 = buffers[self._second_ep].get(i)
        if len(b2) == 1:
            # Only the stop word present: an empty partition.
            self._empty += 1
        for r1 in b1[:-1]:
            for r2 in b2[:-1]:
                yield r1 + r2
        buffers[self._first_ep].remove(i)
        buffers[self._second_ep].remove(i)

    def run(self):
        """Buffer partitions from both endpoints; merge each partition
        as soon as both sides have completed it."""
        done = False
        buffers = {
            self._first_ep: self.PartitionBuffer(),
            self._second_ep: self.PartitionBuffer(),
        }
        while not done or not self._queue.empty():
            e = self._queue.get()
            if e not in buffers:
                print 'ERROR: no buffer for endpoint'
                continue
            valid = True
            closed = False
            while valid and not closed:
                try:
                    r = e.receive(False)
                    buffers[e].append(r)
                    if type(r) is StopWord:
                        current = buffers[e].current()
                        buffers[e].next()
                        # Only merge if all buffers have completed this
                        # partition.
                        merge = True
                        for o in buffers:
                            merge &= buffers[o].finished(current)
                        if merge:
                            for x in self._merge(buffers, current):
                                self._output.send(x)
                            self._output.send(StopWord())
                    # Advance this buffer's partition by 1
                    e.processed()
                except StreamClosedException:
                    closed = True
                except Empty:
                    # Non-blocking receive found nothing; go back to the
                    # notification queue.
                    valid = False
                except:
                    raise
            else:
                # Runs on normal loop exit: we are done once both
                # endpoints report closed.
                done = True
                for o in buffers:
                    done &= o.closed()
            self._queue.task_done()
        self._output.close()
        print 'Join done. %d empty buffers.' % (self._empty)
class DataAccessor(MiniEngine):
    """Executes each query from a query stream against a data source and
    emits the result records, one stop-word-terminated partition per
    query."""

    def __init__(self, query_stream, data_source, access_method):
        MiniEngine.__init__(self)
        self._query_stream = query_stream

        # Create an accessor for the combination of access method and
        # data source.  This should fail if access method and data source
        # are not compatible.
        self._accessor = access_method(data_source)
        self._data_source = data_source
        self._access_method = access_method

        # make sure the accessor understands the query schema
        assert self._accessor.accepts(self._query_stream.schema())
        self._query_stream_ep = self._query_stream.connect()

        # Create an output stream for this data accessor.  The schema of
        # the output stream is determined by the data source.
        output_schema = self._data_source.schema()

        # We can only reasonably infer the sort order if all of the query
        # attributes are included in the output schema.
        if query_stream.sort_order() in output_schema:
            # The new sort order is the same as that of the query stream.
            sort_order = query_stream.sort_order()
        else:
            # No sort order can be inferred; using empty.
            sort_order = SortOrder()

        self._output_stream = Stream(
            output_schema,
            sort_order,
            'DATA ACCESSOR'
        )

    def output(self):
        '''
        Returns the output stream for the given data accessor.
        '''
        return self._output_stream;

    def run(self):
        '''
        The main loop of the data accessor mini-engine. It processes
        every element from the query stream and performs a corresponding
        query against the configured data source.
        '''
        # for each query in the query stream's end-point
        i = 0
        closed = False
        while not closed:
            try:
                q = self._query_stream_ep.receive()
                i += 1
                if type(q) is StopWord:
                    # NOTE(review): 'continue' skips the processed() call
                    # below for stop words — confirm the endpoint does not
                    # require an acknowledgement per received element.
                    continue
                if q:
                    # process the query and write result records into the
                    # output stream
                    for r in self._accessor.query(self._query_stream.schema(), q):
                        self._output_stream.send(r)
                    # finalize the partition that belongs to one query
                    # with a stop word
                    self._output_stream.send(StopWord())
                else:
                    # Technically stop words are not allowed in the query
                    # stream, but they are silently ignored here.
                    pass
                self._query_stream_ep.processed()
            except StreamClosedException:
                closed = True
        self._output_stream.close()
        print 'Closing DA stream'
class Group(MiniEngine):
    """Mini-engine that inserts stop words between runs of equal records.

    ``group_attributes`` maps attribute names to an equality function or
    ``None`` (``None`` means the attribute type's own equality is used).
    Incoming stop words are dropped; a stop word is emitted whenever the
    group attributes change, and once more when the input closes.
    """

    def __init__(self, input_stream, group_attributes):
        MiniEngine.__init__(self)
        self._input_stream = input_stream
        self._input_ep = input_stream.connect()
        self._schema = self._input_stream.schema()
        print(self._schema)
        self._indices = {}
        for a in group_attributes:
            # Raises if the attribute does not exist in the schema.
            i = self._schema.index(a)
            t = self._schema[i].type()
            if group_attributes[a]:
                # Use the explicitly supplied equality function.
                self._indices[i] = group_attributes[a]
            elif hasattr(t, '__eq__'):
                self._indices[i] = None
            else:
                # BUG FIX: '%' previously bound only to the second string
                # literal ('+' has lower precedence), so raising this
                # exception itself raised a formatting error.
                raise Exception('Type of attribute [%s] does not have '
                                'an equality operator.' % (a,))
        # Grouping does not reorder records; the sort order carries over.
        self._output_stream = Stream(self._schema,
                                     self._input_stream.sort_order(),
                                     'GROUP')

    def output(self):
        """Return the grouped output stream."""
        return self._output_stream

    def _compare(self, a, b):
        """Return True when a and b agree on all group attributes."""
        for i in self._indices:
            eq = self._indices[i]
            if eq:
                if not eq(a[i], b[i]):
                    return False
            elif a[i] != b[i]:
                return False
        return True

    def run(self):
        """Main loop: re-partition the input by the group attributes."""
        closed = False
        last = None
        while not closed:
            try:
                r = self._input_ep.receive()
                if type(r) is StopWord:
                    # Incoming stop words are ignored; this engine defines
                    # its own partitioning.
                    pass
                else:
                    if last is None or self._compare(last, r):
                        self._output_stream.send(r)
                    else:
                        # Group attributes changed: terminate the previous
                        # group with a stop word, then start the new one.
                        self._output_stream.send(StopWord())
                        self._output_stream.send(r)
                    last = r
                self._input_ep.processed()
            except StreamClosedException:
                closed = True
        # Terminate the final group.
        self._output_stream.send(StopWord())
        print('Closing GROUP stream')
        self._output_stream.close()
class DataAccessor(MiniEngine):
    """Runs every query arriving on the query stream against the data
    source and emits one stop-word-terminated result partition per
    query."""

    def __init__(self, query_stream, data_source, access_method):
        MiniEngine.__init__(self)
        self._query_stream = query_stream

        # Create an accessor for the combination of access method and
        # data source.  This should fail if access method and data source
        # are not compatible.
        self._accessor = access_method(data_source)
        self._data_source = data_source
        self._access_method = access_method

        # make sure the accessor understands the query schema
        assert self._accessor.accepts(self._query_stream.schema())
        self._query_stream_ep = self._query_stream.connect()

        # Create an output stream for this data accessor.  The schema of
        # the output stream is determined by the data source.
        output_schema = self._data_source.schema()

        # We can only reasonably infer the sort order if all of the query
        # attributes are included in the output schema.
        if query_stream.sort_order() in output_schema:
            # The new sort order is the same as that of the query stream.
            sort_order = query_stream.sort_order()
        else:
            # No sort order can be inferred; using empty.
            sort_order = SortOrder()

        self._output_stream = Stream(output_schema, sort_order, 'DATA ACCESSOR')

    def output(self):
        '''
        Returns the output stream for the given data accessor.
        '''
        return self._output_stream

    def run(self):
        '''
        The main loop of the data accessor mini-engine. It processes
        every element from the query stream and performs a corresponding
        query against the configured data source.
        '''
        # for each query in the query stream's end-point
        i = 0
        closed = False
        while not closed:
            try:
                q = self._query_stream_ep.receive()
                i += 1
                if type(q) is StopWord:
                    # NOTE(review): 'continue' bypasses processed() for
                    # stop words — confirm the endpoint tolerates that.
                    continue
                if q:
                    # process the query and write result records into the
                    # output stream
                    for r in self._accessor.query(self._query_stream.schema(), q):
                        self._output_stream.send(r)
                    # finalize the partition that belongs to one query
                    # with a stop word
                    self._output_stream.send(StopWord())
                else:
                    # Technically stop words are not allowed in the query
                    # stream, but they are silently ignored here.
                    pass
                self._query_stream_ep.processed()
            except StreamClosedException:
                closed = True
        self._output_stream.close()
        print 'Closing DA stream'
class Mux(MiniEngine):
    """Merges any number of same-schema input streams into one output.

    Forwarding order follows endpoint-availability notifications on the
    shared queue, so the merged output carries no sort order.
    """

    def __init__(self, *streams):
        MiniEngine.__init__(self)
        if not streams:
            raise Exception('Mux: must specify at least one stream.')
        self._streams = streams
        # Endpoints announce available data by enqueueing themselves.
        self._queue = Queue(100)
        self._stats = {}
        self._endpoints = dict([(s.connect(), s) for s in self._streams])
        for e in self._endpoints:
            self._stats[e] = 0
            e.notify(self._queue)
        # Reject stream sets with mismatched schemas.
        for s in self._streams:
            if s.schema() != self._streams[0].schema():
                raise Exception('Mux: schema of streams must match.')
        self._output = Stream(
            self._streams[0].schema(),
            SortOrder(),
            'Mux Output'
        )

    def output(self):
        """Return the merged output stream."""
        return self._output

    def run(self):
        """Forward records from notified endpoints until all close."""
        while self._endpoints or not self._queue.empty():
            e = self._queue.get()
            if e not in self._endpoints:
                print '\t********* got non-existing endpoint'
                continue
            valid = True
            closed = False
            # Drain this endpoint non-blockingly until empty or closed.
            while valid and not closed:
                try:
                    r = e.receive(False)
                    self._stats[e] += 1
                    self._output.send(r)
                    e.processed()
                except StreamClosedException:
                    closed = True
                except:
                    # NOTE(review): bare except — likely intended for the
                    # empty-queue case of receive(False) only; it also
                    # swallows unrelated errors.  Confirm and narrow.
                    valid = False
            else:
                # Always reached (no break above): discard endpoints whose
                # stream has closed.
                if e in self._endpoints:
                    if closed:
                        del self._endpoints[e]
                else:
                    pass
            self._queue.task_done()
        self._output.close()
        # for e in self._stats:
        #     print 'Received %d records from %s' % (self._stats[e], e)
        print 'Mux: done.'
class Group(MiniEngine):
    """Re-partitions a stream by emitting stop words between runs of
    records that differ on the configured group attributes.

    ``group_attributes`` maps attribute names to an equality function or
    ``None`` (use the type's own equality).  Incoming stop words are
    dropped; a final stop word terminates the last group at close.
    """

    def __init__(self, input_stream, group_attributes):
        MiniEngine.__init__(self)
        self._input_stream = input_stream
        self._input_ep = input_stream.connect()
        self._schema = self._input_stream.schema()
        print(self._schema)
        self._indices = {}
        for a in group_attributes:
            # Raises if the attribute does not exist in the schema.
            i = self._schema.index(a)
            t = self._schema[i].type()
            if group_attributes[a]:
                self._indices[i] = group_attributes[a]
            elif hasattr(t, '__eq__'):
                self._indices[i] = None
            else:
                # BUG FIX: '%' previously applied only to the second
                # string literal, so this raise path itself raised a
                # formatting error.
                raise Exception('Type of attribute [%s] does not have '
                                'an equality operator.' % (a,))
        self._output_stream = Stream(self._schema,
                                     self._input_stream.sort_order(),
                                     'GROUP')

    def output(self):
        """Return the grouped output stream."""
        return self._output_stream

    def _compare(self, a, b):
        """True when records a and b agree on every group attribute."""
        for i in self._indices:
            eq = self._indices[i]
            if eq:
                if not eq(a[i], b[i]):
                    return False
            elif a[i] != b[i]:
                return False
        return True

    def run(self):
        """Main loop: split the input into equality-based groups."""
        closed = False
        last = None
        while not closed:
            try:
                r = self._input_ep.receive()
                if type(r) is StopWord:
                    # Input stop words are ignored; grouping defines its
                    # own partitions.
                    pass
                else:
                    if last is None or self._compare(last, r):
                        self._output_stream.send(r)
                    else:
                        # New group: close the previous one first.
                        self._output_stream.send(StopWord())
                        self._output_stream.send(r)
                    last = r
                self._input_ep.processed()
            except StreamClosedException:
                closed = True
        # Terminate the final group.
        self._output_stream.send(StopWord())
        print('Closing GROUP stream')
        self._output_stream.close()
class Sort(MiniEngine):
    """Sorts stream records by a list of ``(name, comparator)`` pairs.

    A ``None`` comparator falls back to the attribute type's ``__cmp__``.
    ``all=False``: each stop-word partition is sorted independently;
    ``all=True``: the whole stream is buffered, sorted once and
    terminated with a single stop word.
    """

    def __init__(self, input_stream, sort_attributes, all=False):
        MiniEngine.__init__(self)
        self._input_stream = input_stream
        self._input_ep = input_stream.connect()
        self._schema = self._input_stream.schema()
        self._all = all
        self._indices = []
        # Passed as attributes = [('name', comparison_function), ...]
        for a in sort_attributes:
            # Raises if the attribute does not exist in the schema.
            i = self._schema.index(a[0])
            t = self._schema[i].type()
            if a[1]:
                # Explicit comparison function wins.
                self._indices.append((i, a[1]))
            elif hasattr(t, '__cmp__'):
                # Fall back to the type's own comparison operator.
                self._indices.append((i, None))
            else:
                # BUG FIX: '%' previously bound only to the second string
                # literal, so this raise path itself raised a formatting
                # error instead of the intended message.
                raise Exception('Type of attribute [%s] does not have '
                                'a comparison operator.' % (a,))
        self._output_stream = Stream(self._schema, SortOrder(), 'SORT')

    def output(self):
        """Return the sorted output stream."""
        return self._output_stream

    def _compare(self, a, b):
        """Lexicographic comparison over the configured attributes."""
        for index, comparator in self._indices:
            if comparator:
                x = comparator(a[index], b[index])
            else:
                x = cmp(a[index], b[index])
            if x != 0:
                return x
        return 0

    def run(self):
        """Buffer, sort and flush records (per partition or globally)."""
        closed = False
        # Renamed from 'set', which shadowed the builtin.
        buffered = []
        while not closed:
            try:
                r = self._input_ep.receive()
                if type(r) is StopWord:
                    if not self._all:
                        # Sort and flush this partition, then forward its
                        # stop word.
                        buffered.sort(self._compare)
                        for x in buffered:
                            self._output_stream.send(x)
                        buffered = []
                        self._output_stream.send(r)
                else:
                    buffered.append(r)
                self._input_ep.processed()
            except StreamClosedException:
                closed = True
        if self._all:
            # Whole-stream sort, closed by a single stop word.
            buffered.sort(self._compare)
            for x in buffered:
                self._output_stream.send(x)
            self._output_stream.send(StopWord())
        print('Closing SORT stream')
        self._output_stream.close()
class Tunnel(object):
    '''
    Multiplexes several logical connections over a single obfuscated
    Stream.

    Wire frame layout: a 6-byte header struct('!HI') holding the frame
    type and payload length, followed by a 16-byte connection UUID and
    the payload bytes.  Frame-type handlers are registered class-wide
    via the set_*_handler static methods and copied per instance.
    '''

    # Frame type codes (first header field).
    _TCP_INITIAL_DATA = 0
    _TCP_FIN_DATA = 1
    _TCP_CLOSED_DATA = 2
    _UDP_INITIAL_DATA = 3
    _UDP_CLOSED_DATA = 4
    _TUN_INITIAL_DATA = 5
    _PAYLOAD = 10
    _HEARTBEAT = 100

    # Handlers shared by every tunnel; heartbeats are simply ignored.
    _static_handlers = {
        _HEARTBEAT: (lambda _, __, ___: None)
    }

    @staticmethod
    def set_tcp_initial_handler(handler):
        Tunnel._static_handlers[Tunnel._TCP_INITIAL_DATA] = handler

    @staticmethod
    def set_tcp_fin_received_handler(handler):
        Tunnel._static_handlers[Tunnel._TCP_FIN_DATA] = handler

    @staticmethod
    def set_tcp_closed_handler(handler):
        Tunnel._static_handlers[Tunnel._TCP_CLOSED_DATA] = handler

    @staticmethod
    def set_udp_initial_handler(handler):
        Tunnel._static_handlers[Tunnel._UDP_INITIAL_DATA] = handler

    @staticmethod
    def set_udp_closed_handler(handler):
        Tunnel._static_handlers[Tunnel._UDP_CLOSED_DATA] = handler

    @staticmethod
    def set_tun_initial_handler(handler):
        Tunnel._static_handlers[Tunnel._TUN_INITIAL_DATA] = handler

    def __init__(self, connection=None, connect_to=None):
        '''
        connection: an already-accepted Stream (server side), or None.
        connect_to: (host, port) to dial (client side), or None.
        '''
        self._stream = connection
        self._connect_to = connect_to
        self._on_initial_data = None
        self._on_payload = None
        self._on_stream_closed = None
        # Per-instance handler table, seeded from the class-wide one.
        self._handlers = self._static_handlers.copy()
        self._handlers.update({
            Tunnel._PAYLOAD:
                lambda _, id_, data: self._on_payload(self, id_, data)
        })
        self._on_ready_to_send = None
        self._on_send_buffer_full = None
        self._hb_event = None
        # Logical connections multiplexed over this tunnel, keyed by UUID.
        self.connections = {}

    # Identity of a tunnel is the identity of its underlying stream.
    def __hash__(self):
        return hash(self._stream)

    def __eq__(self, other):
        if not isinstance(other, Tunnel):
            return False
        return self._stream == other._stream

    def __str__(self):
        return str(self._stream)

    def _send_heartbeat(self):
        # Content-less frame; re-arms the timer for the next beat.
        self._send_content(Tunnel._HEARTBEAT, None, None)
        self._enable_heartbeat()

    def _enable_heartbeat(self):
        self._hb_event = Event.add_timer(HEARTBEAT_INTERVAL)
        self._hb_event.set_handler(lambda ev: self._send_heartbeat())

    def _disable_heartbeat(self):
        if self._hb_event is not None:
            self._hb_event.del_timer()
            self._hb_event = None

    def _on_fin_received(self):
        self._disable_heartbeat()
        self._stream.close()

    def initialize(self):
        '''Set up the obfuscation pipeline and start the tunnel.'''
        if self._stream is None:
            self._stream = Stream(prefix='TUNNEL')
            # self._stream.set_buffer_size(BUFF_SIZE)
        self._stream.set_tcp_no_delay()
        # Send-side encoders are applied in order; receive-side decoders
        # mirror them in reverse.
        self._stream.append_send_handler(obscure.pack_data)
        self._stream.append_send_handler(obscure.random_padding)
        # self._stream.append_send_handler(obscure.gen_aes_encrypt())
        self._stream.append_send_handler(obscure.gen_xor_encrypt())
        # self._stream.append_send_handler(obscure.base64_encode)
        self._stream.append_send_handler(
            obscure.gen_http_encode(self._connect_to is not None))
        self._stream.append_receive_handler(
            obscure.gen_http_decode(self._connect_to is not None))
        # self._stream.append_receive_handler(obscure.base64_decode)
        self._stream.append_receive_handler(obscure.gen_xor_decrypt())
        # self._stream.append_receive_handler(obscure.gen_aes_decrypt())
        self._stream.append_receive_handler(obscure.unpad_random)
        self._stream.append_receive_handler(obscure.unpack_data)
        self._stream.set_on_ready_to_send(
            lambda _: self._on_tunnel_ready_to_send())
        self._stream.set_on_send_buffer_full(
            lambda _: self._on_tunnel_send_buffer_full())
        self._stream.set_on_received(
            lambda _, data, addr: self._on_received(data, addr))
        self._stream.set_on_fin_received(lambda _: self._on_fin_received())
        self._stream.set_on_closed(lambda _: self._on_closed())
        if self._connect_to is not None:
            # Client side: dial out.
            self._stream.connect(*self._connect_to)
        else:
            # Server side: undecodable traffic is diverted to a fallback
            # backend instead of killing the connection.
            self._stream.set_on_decode_error(
                lambda _, received: self._on_decode_error(received))
            self._stream.start_receiving()
        self._enable_heartbeat()

    def register(self, key, conn):
        _logger.debug('%s, register: %s(%s)', str(self), str(key), str(conn))
        assert(key not in self.connections)
        self.connections[key] = conn

    def deregister(self, key):
        _logger.debug('%s, deregister(%s)', str(self), str(key))
        if key in self.connections:
            del self.connections[key]

    def get_connection(self, key):
        if key in self.connections:
            return self.connections[key]
        else:
            _logger.debug('no such connection: %s', str(key))
            return None

    def clear_connections(self):
        _logger.debug('%s, clear_connections (%d)',
                      str(self), len(self.connections))
        self.connections.clear()

    def is_ready_to_send(self):
        return self._stream.is_ready_to_send()

    def _send_content(self, type_, id_, content):
        '''Frame and send one message; id_=None sends a zeroed UUID and
        an empty payload (content is ignored).'''
        if id_ is None:
            to_send = struct.pack('!HI', type_, 0) + '\x00' * 16
        else:
            to_send = (struct.pack('!HI', type_, len(content)) +
                       id_.get_bytes() + content)
        self._stream.send(to_send)

    def _on_tunnel_ready_to_send(self):
        if self._on_ready_to_send is not None:
            self._on_ready_to_send(self)

    def _on_tunnel_send_buffer_full(self):
        if self._on_send_buffer_full is not None:
            self._on_send_buffer_full(self)

    def _on_received(self, data, _addr):
        '''Parse one decoded frame and dispatch it to its type handler.

        Returns True to keep the underlying stream receiving.
        Raises on structurally corrupted frames.
        '''
        _logger.debug("tunnel %s received %d bytes" % (str(self), len(data)))
        if len(data) < 6 + 16:
            raise Exception('corrupted data')
        type_, content_length = struct.unpack('!HI', data[: 6])
        id_ = uuid.UUID(bytes=data[6: 6 + 16])
        if type_ not in self._handlers or self._handlers[type_] is None:
            _logger.warning("tunnel message type %d can not be handled", type_)
            # BUG FIX: the original fell through and still executed
            # self._handlers[type_](...), raising KeyError (unknown
            # type) or TypeError (None handler).  Skip the frame.
            return True
        if len(data) - 6 - 16 != content_length:
            raise Exception('corrupted data')
        self._handlers[type_](self, id_, data[6 + 16:])
        return True

    def send_tcp_initial_data(self, id_, data):
        self._send_content(Tunnel._TCP_INITIAL_DATA, id_, data)

    def send_tcp_fin_data(self, id_, data=''):
        self._send_content(Tunnel._TCP_FIN_DATA, id_, data)

    def send_tcp_closed_data(self, id_, data=''):
        self._send_content(Tunnel._TCP_CLOSED_DATA, id_, data)

    def send_udp_initial_data(self, id_, data):
        self._send_content(Tunnel._UDP_INITIAL_DATA, id_, data)

    def send_udp_closed_data(self, id_, data=''):
        self._send_content(Tunnel._UDP_CLOSED_DATA, id_, data)

    def send_tun_initial_data(self, id_, data):
        self._send_content(Tunnel._TUN_INITIAL_DATA, id_, data)

    def send_payload(self, id_, payload):
        self._send_content(Tunnel._PAYLOAD, id_, payload)

    def set_on_payload(self, handler):
        self._on_payload = handler

    def set_on_ready_to_send(self, handler):
        self._on_ready_to_send = handler

    def set_on_send_buffer_full(self, handler):
        self._on_send_buffer_full = handler

    def _on_closed(self):
        self._disable_heartbeat()
        if self._on_stream_closed is not None:
            self._on_stream_closed(self)

    def set_on_closed(self, handler):
        self._on_stream_closed = handler

    def close(self):
        self._stream.close()

    def is_closed(self):
        return self._stream.is_closed()

    def _on_decode_error(self, received):
        '''Fallback for non-tunnel clients: strip our encoders (reaches
        into Stream._encoders) and splice the raw stream to a plain
        backend connection, relaying bytes in both directions.'''
        self._disable_heartbeat()
        self._stream._encoders = []
        backend = Stream(prefix="SIMPLE")

        # Flow control: pause/resume the backend as the tunnel drains.
        def tunnel_ready_to_send(_):
            backend.start_receiving()

        def tunnel_send_buffer_full(_):
            backend.stop_receiving()

        def tunnel_received(_, data, _addr):
            backend.send(data)
            return backend.is_ready_to_send()

        def tunnel_closed(_):
            backend.close()

        def backend_received(_, data, _addr):
            self._stream.send(data)
            return self._stream.is_ready_to_send()

        def backend_closed(_self):
            self._stream.close()

        self._stream.set_on_ready_to_send(tunnel_ready_to_send)
        self._stream.set_on_send_buffer_full(tunnel_send_buffer_full)
        self._stream.set_on_received(tunnel_received)
        self._stream.set_on_closed(tunnel_closed)
        backend.set_on_received(backend_received)
        backend.set_on_closed(backend_closed)
        # Replay whatever bytes already arrived before the decode error.
        if received is not None and len(received) > 0:
            backend.send(received)
        backend.connect(UNKNOWN_CONN_ADDR, UNKNOWN_CONN_PORT)
class CheatGame:
    '''
    Console front-end for the card game "Cheat".  Drives a local Engine
    and synchronizes turns/bluff-calls between players through
    firebaseutils listener Streams.
    '''

    def __init__(self):
        self.engine = None            # game engine, created in start()
        self.roomKey = None           # Firebase room identifier
        self.localPlayer = None       # this client's Player
        self.callStream = None        # listener for bluff calls
        self.turnStream = None        # listener for played turns
        self.takeTurnAfterCalls = False

    def printHeader(self):
        clearScreen()
        print("______________________________")
        print("            CHEAT             ")
        print("       by Anmol Parande       ")
        print("______________________________")

    def printWelcome(self, name):
        clearScreen()
        print(f"Welcome {name}. What would you like to do?")
        print("1. Join Room")
        print("2. Create Room")

    def start(self):
        '''Entry point: prompt for a name, join or create a room, then
        wait for the game to begin.'''
        self.printHeader()
        name = self.enterName()
        self.localPlayer = Player(name)
        self.printWelcome(name)
        choice = self.handleEntrance()
        clearScreen()
        if choice == 1:
            success = False
            while not success:
                self.roomKey = input("Please enter your room key: ")
                success = self.joinRoom()
        else:
            self.localPlayer.isHost = True
            self.roomKey = self.createRoom()
        self.engine = Engine(self.localPlayer)
        self.waitForOthers()

    def enterName(self):
        name = input("Please enter your name: ")
        return name

    def joinRoom(self):
        '''Attempt to join the room; returns True on success.'''
        success, message = firebaseutils.joinRoom(self.roomKey,
                                                  self.localPlayer.name)
        if not success:
            print(f"Error: Could not join room because {message}")
        return success

    def createRoom(self):
        '''Create a new room and return its key; exits on failure.'''
        success, message, data = firebaseutils.createRoom(self.localPlayer.name)
        if not success:
            print("Could not create room key. Please try again")
            os._exit(1)
        clearScreen()
        print(message)
        return data['roomKey']

    def waitForOthers(self):
        '''Listen for players joining; the host waits for a manual
        start, everyone else waits for the host's start signal.'''
        recentlyJoined = True

        def putFunc(data):
            nonlocal recentlyJoined
            for p in data:
                success = self.engine.addPlayer(p)
                if recentlyJoined and success:
                    # Initial snapshot: these players were already here.
                    print(f"{p} is already in the room")
                elif success:
                    print(f"{p} joined the room")
            # After the first snapshot, further callbacks are new joins.
            recentlyJoined = False

        playerStream = Stream(putFunc)
        firebaseutils.listenToPlayers(playerStream, self.roomKey)
        if self.localPlayer.isHost:
            self.waitForStart()
            playerStream.close()
            self.engine.startGame()
            self.startGame()
        else:
            def shouldStartGame(stillWaiting):
                if not stillWaiting:
                    clearScreen()
                    print("The host has started the game.")
                    startStream.close()
                    playerStream.close()
                    try:
                        self.loadGame()
                    except firebaseutils.FirebaseError:
                        self.exitWithError()

            startStream = Stream(shouldStartGame)
            firebaseutils.listenForStart(startStream, self.roomKey)

    def handleEntrance(self):
        '''Read the menu choice; returns 1 (join) or 2 (create).'''
        choice = input()
        while choice != "1" and choice != "2":
            choice = input("Please enter either 1 or 2: ")
        return int(choice)

    def startGame(self):
        '''Host only: deal hands, publish them, and start listening for
        turns.'''
        hands = self.engine.listHands()
        self.engine.orderPlayers()
        firebaseutils.startGame(self.roomKey, hands, self.engine.playerList)
        clearScreen()
        self.printTurns()
        self.turnStream = Stream(self.turnListener)
        firebaseutils.listenForTurn(self.turnStream, self.roomKey)

    def turnListener(self, data):
        '''Callback for a published turn: either react to another
        player's card or take our own turn.'''
        if data is not None:
            actual = data.get("lastPlayedCard", None)
            calls = data.get('calls', [])
        else:
            actual = None
            calls = []
        self.engine.lastPlayedCard = actual
        if actual is not None and \
                self.engine.currentPlayer().name != self.localPlayer.name:
            print(f"{self.engine.currentPlayer().name} played a {strFromValue(self.engine.currentRank + 2)}, but they might be lying")
            self.engine.registerTurn()
            self.takeTurnAfterCalls = True
            didCall = self.makeDecision() == 'c'
            firebaseutils.logCall(self.roomKey, self.localPlayer.name, didCall)
            self.callStream = Stream(self.callListener)
            firebaseutils.listenForCall(self.callStream, self.roomKey)
        else:
            self.engine.registerTurn()
            self.takeTurn()

    def callListener(self, data):
        '''Callback for bluff-call results; announces the outcome once
        every player has responded.'''
        if data is None:
            return
        result = self.engine.logCalls(data)
        if self.engine.isReadyForNextPlayer:
            clearScreen()
            if result == 0:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("Nobody thought you were bluffing :)")
                else:
                    print(f"Nobody thought {self.engine.previousPlayer().name} was bluffing")
            elif result == -1:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("You were called on your bluff! You just picked up the pile :(")
                else:
                    print(f"{self.engine.previousPlayer().name} was bluffing!")
            elif result == 1:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("People thought you bluffed, but they were wrong :)")
                else:
                    print(f"{self.engine.previousPlayer().name} was not bluffing! All players who thought they were have divided the pile amongst themselves")
            print()
            self.callStream.close()
            if self.engine.isGameOver():
                self.endGame()
            self.printTurns()
            if self.takeTurnAfterCalls:
                self.takeTurn()

    def takeTurn(self):
        '''Play our card(s) and wait for other players' bluff calls.'''
        if self.engine.currentPlayer().name != self.localPlayer.name:
            return
        cardHash = self.engine.takeTurn()
        clearScreen()
        firebaseutils.clearCalls(self.roomKey)
        firebaseutils.logTurn(self.roomKey, cardHash)
        print("Waiting for other players to call your bluff or let you pass")
        self.takeTurnAfterCalls = False
        self.callStream = Stream(self.callListener)
        firebaseutils.listenForCall(self.callStream, self.roomKey)

    def waitForStart(self):
        '''Block until the host confirms and enough players joined.'''
        input("Press a key when you are ready to start the game: \n")
        while len(self.engine.players) == 1:
            input("At least two people need to be in the game to start: \n")

    def makeDecision(self):
        '''Prompt until the player types 'c' (call) or 'p' (pass).

        BUG FIX: the original re-prompted only once, so a second
        invalid answer leaked through as a non-call.'''
        didCall = input("Type 'c' to call their bluff and 'p' to let them pass\n")
        while didCall != 'c' and didCall != 'p':
            didCall = input("Please type 'c' or 'p'")
        return didCall

    def loadGame(self):
        '''Non-host: fetch the dealt state and start listening for
        turns.'''
        # Brief delay so the host's published state is available.
        time.sleep(0.1)
        hands, turnList = firebaseutils.loadGameData(self.roomKey)
        self.engine.setGameState(hands, turnList)
        self.printTurns()
        # Keep the stream on self (consistent with startGame); the
        # original dropped it into a local.
        self.turnStream = Stream(self.turnListener)
        firebaseutils.listenForTurn(self.turnStream, self.roomKey)

    def endGame(self):
        print(f"Game Over: {self.engine.previousPlayer().name} won")
        _thread.interrupt_main()
        os._exit(0)

    def exitWithError(self):
        print("Oops. Something went wrong. Gameplay was ended")
        _thread.interrupt_main()
        # BUG FIX: os.exit does not exist (AttributeError); os._exit is
        # the hard-exit primitive used elsewhere in this class.
        os._exit(1)

    def printTurns(self):
        if self.engine.currentPlayer().name == self.localPlayer.name:
            print("It is your turn")
        else:
            print(f"It is {self.engine.currentPlayer().name}'s turn")
        self.localPlayer.printHand()
        print()
class Join(MiniEngine):
    '''
    The Join mini-engine combines the records of two streams in such a
    way that the result is the Cartesian product between two
    corresponding record partitions, one from each stream.
    '''

    class PartitionBuffer(object):
        '''
        This class represents a partition buffer for a given endpoint.
        Each partition received from the endpoint is stored in a
        separate buffer.
        '''

        def __init__(self):
            # List of partitions; each partition is a list of records
            # (terminated by its StopWord once complete).
            self._b = []

        def append(self, r):
            # Append a record to the newest partition, creating the
            # first partition lazily.
            if len(self._b) == 0:
                self._b.append([])
            self._b[-1].append(r)

        def current(self):
            # Index of the newest (possibly unfinished) partition.
            return len(self._b) - 1

        def next(self):
            # Start a new, empty partition.
            self._b.append([])

        def get(self, i):
            # Return partition i; i must be a valid index.
            assert i >= 0 and i < len(self._b)
            return self._b[i]

        def finished(self, i):
            # A partition is finished once a newer one has been started.
            return i < len(self._b) - 1

        def remove(self, i):
            # Release the records of partition i while keeping later
            # partition indices stable (slot is overwritten, not deleted).
            assert i >= 0 and i < len(self._b)
            t = self._b[i]
            self._b[i] = None
            del t

    def __init__(self, first, second):
        '''
        first, second: the two input streams to join partition-wise.
        '''
        MiniEngine.__init__(self)
        self._first = first
        self._second = second

        # Construct the schema of the output stream.
        self._schema = Schema()
        for a in self._first.schema() + self._second.schema():
            self._schema.append(a)

        # Both endpoints notify this queue when data is available.
        self._queue = Queue(100)

        self._first_ep = self._first.connect()
        self._first_ep.notify(self._queue)

        self._second_ep = self._second.connect()
        self._second_ep.notify(self._queue)

        self._output = Stream(self._schema, SortOrder(), 'Join')

        # Endpoint -> owning stream (kept for reference).
        self._m = {
            self._first_ep: self._first,
            self._second_ep: self._second,
        }
        # Count of partitions whose second input held only a StopWord.
        self._empty = 0

    def output(self):
        # The joined output stream.
        return self._output

    def _merge(self, buffers, i):
        # Yield the Cartesian product of partition i from both inputs,
        # then release both partitions.  Both must be complete.
        assert buffers[self._first_ep].finished(i)
        assert buffers[self._second_ep].finished(i)

        b1 = buffers[self._first_ep].get(i)
        b2 = buffers[self._second_ep].get(i)

        # A partition holding only its StopWord contributes nothing;
        # count it for the summary printed at the end of run().
        if len(b2) == 1:
            self._empty += 1

        # The trailing element of each partition is its StopWord;
        # exclude it from the product.
        for r1 in b1[:-1]:
            for r2 in b2[:-1]:
                yield r1 + r2

        buffers[self._first_ep].remove(i)
        buffers[self._second_ep].remove(i)

    def run(self):
        done = False
        # One partition buffer per input endpoint.
        buffers = {
            self._first_ep: self.PartitionBuffer(),
            self._second_ep: self.PartitionBuffer(),
        }
        while not done or not self._queue.empty():
            # Block until some endpoint reports available data.
            e = self._queue.get()
            if e not in buffers:
                print 'ERROR: no buffer for endpoint'
                continue
            valid = True
            closed = False
            # Drain the endpoint without blocking (receive(False)).
            while valid and not closed:
                try:
                    r = e.receive(False)
                    buffers[e].append(r)
                    if type(r) is StopWord:
                        current = buffers[e].current()
                        buffers[e].next()
                        # Only merge if all buffers have completed this
                        # partition.
                        merge = True
                        for o in buffers:
                            merge &= buffers[o].finished(current)
                        if merge:
                            for x in self._merge(buffers, current):
                                self._output.send(x)
                            self._output.send(StopWord())
                    # Advance this buffer's partition by 1
                    e.processed()
                except StreamClosedException:
                    closed = True
                except Empty:
                    valid = False
                except:
                    raise
            else:
                # while-else: the drain loop ended without break; we are
                # done once every input endpoint has closed.
                done = True
                for o in buffers:
                    done &= o.closed()
            self._queue.task_done()
        self._output.close()
        print 'Join done. %d empty buffers.' % (self._empty)