def write_data(self, data):
    """Write one stream payload to the log file, optionally time-prefixed.

    When ``self._time_format`` is set, every line of the payload gets a
    ``"<time> [<pid>] | "`` prefix built from the event's timestamp (or
    the current time when the event carries none).
    """
    text = to_str(data['data'])

    if self._time_format is not None:
        # Prefer the timestamp shipped with the event; fall back to "now".
        if 'timestamp' in data:
            when = self.fromtimestamp(data['timestamp'])
        else:
            when = self.now()
        stamp = when.strftime(self._time_format)
        prefix = '{time} [{pid}] | '.format(time=stamp, pid=data['pid'])
        # Prefix the first line, then re-prefix after every newline.
        text = prefix + text.rstrip('\n')
        text = text.replace('\n', '\n' + prefix)
        text += '\n'

    # Best-effort write: if the file object rejects the text (e.g. an
    # encoding problem), round-trip through latin-1 with replacement
    # characters rather than losing the log line.
    try:
        self._file.write(text)
    except Exception:
        text = text.encode('latin-1', errors='replace').decode('latin-1')
        self._file.write(text)

    self._file.flush()
def handle_recv(self, data):
    """called each time circusd sends an event"""
    # maintains a periodic callback to compute mem and cpu consumption for
    # each pid.
    logger.debug('Received an event from circusd: %s' % str(data))
    topic, msg = data
    try:
        parts = to_str(topic).split('.')
        watcher = parts[1:-1][0]
        action = parts[-1]
        msg = json.loads(msg)

        if action in ('reap', 'kill'):
            # a process was reaped
            self.remove_pid(watcher, msg['process_pid'])
        elif action == 'spawn':
            # a process was added
            self._append_pid(watcher, msg['process_pid'])
        elif action == 'stop':
            # the whole watcher was stopped.
            self.stop_watcher(watcher)
        else:
            logger.debug('Unknown action: %r' % action)
            logger.debug(msg)
    except Exception:
        logger.exception('Failed to handle %r' % msg)
def handle_recv(self, data):
    """Append ``watcher:action`` to the configured file for each event."""
    topic, msg = data
    # Topic is dotted: "<prefix>.<watcher>.<action>".
    parts = to_str(topic).split(".")
    watcher = parts[1]
    action = parts[2]
    with open(self.config['file'], 'a+') as record:
        record.write('%s:%s' % (watcher, action))
def iter_messages(self):
    """ Yields tuples of (watcher, subtopic, stat)"""
    # Bind the receive method once; it is called inside the loop below.
    recv = self.pubsub_socket.recv_multipart
    with self:
        while True:
            try:
                # poll() takes milliseconds; self.timeout is in seconds.
                events = dict(self.poller.poll(self.timeout * 1000))
            except zmq.ZMQError as e:
                # A signal interrupted the poll (EINTR): just retry.
                if e.errno == errno.EINTR:
                    continue
                raise
            if len(events) == 0:
                # Poll timed out with nothing ready; try again.
                continue
            try:
                topic, stat = recv()
            except zmq.core.error.ZMQError as e:
                if e.errno != errno.EINTR:
                    raise
                else:
                    try:
                        # sys.exc_clear exists on Python 2 only; the
                        # wrapper makes this a no-op on Python 3.
                        sys.exc_clear()
                    except Exception:
                        pass
                    continue
            # Topics are dotted strings; 3 parts carry a subtopic,
            # 2 parts do not (subtopic is reported as None).
            topic = to_str(topic).split('.')
            if len(topic) == 3:
                __, watcher, subtopic = topic
                yield watcher, subtopic, json.loads(stat)
            elif len(topic) == 2:
                __, watcher = topic
                yield watcher, None, json.loads(stat)
def __call__(self, data):
    """Write each non-empty payload line, prefixed, then reset the color."""
    for chunk in to_str(data['data']).split('\n'):
        if not chunk:
            continue
        self.out.write(self.prefix(data))
        self.out.write(chunk)
        # stop coloring
        self.out.write('\033[0m\n')
    self.out.flush()
def run_ctl(args, queue=None, stdin='', endpoint=DEFAULT_ENDPOINT_DEALER):
    """Run circusctl with *args*; return decoded (stdout, stderr).

    When *queue* is given, stderr, stdout and the return code are also
    pushed onto it (in that order) for consumption by another thread.
    """
    cmd = '%s -m circus.circusctl' % PYTHON
    # Inject the default endpoint unless the caller supplied one.
    if '--endpoint' not in args:
        args = '--endpoint %s ' % endpoint + args

    argv = cmd.split() + shlex.split(args)
    proc = subprocess.Popen(argv,
                            stdin=subprocess.PIPE if stdin else None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate(to_bytes(stdin) if stdin else None)
    stdout = to_str(stdout)
    stderr = to_str(stderr)

    if queue:
        queue.put(stderr)
        queue.put(stdout)
        queue.put(proc.returncode)

    # Tear down gevent's hub if gevent happens to be installed.
    try:
        import gevent
        if hasattr(gevent, 'shutdown'):
            gevent.shutdown()
    except ImportError:
        pass
    return stdout, stderr
def read(self, timeout=None):
    """Return (via tornado's Return) the next line, polling the stream.

    Raises TimeoutException when no line arrives within *timeout* seconds
    (defaults to ``self._timeout``).
    """
    timeout = timeout or self._timeout

    # Serve buffered lines first.
    if self._buffer:
        raise tornado.gen.Return(self._buffer.pop(0))

    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            msg = self._stream.get_nowait()
        except Empty:
            yield tornado_sleep(0.1)
            continue
        self._buffer.extend(
            piece for piece in to_str(msg['data']).split('\n') if piece)
        raise tornado.gen.Return(self._buffer.pop(0))
    raise TimeoutException('Timeout reading queue')
def test_copy_path(self):
    watcher = SomeWatcher(stream=True)
    yield watcher.run()
    # wait for watcher data at most 5s
    messages = []
    resp = False
    deadline = time.time() + 5
    while time.time() <= deadline:
        yield tornado_sleep(0.5)
        # More than one Queue.get call is needed to get full
        # output from a watcher in an environment with rich sys.path.
        try:
            messages.append(watcher.stream.get(block=False))
        except Queue.Empty:
            pass
        data = ''.join(to_str(m['data']) for m in messages)
        if 'XYZ' in data:
            resp = True
            break
    self.assertTrue(resp)
    yield watcher.stop()
def read_from_stream(stream, desired_channel, timeout=10):
    # Drain *stream* until a line for *desired_channel* is available, or
    # *timeout* seconds elapse. Payload lines look like "<channel>:<text>";
    # a partial trailing line is carried in `accumulator` until its
    # newline arrives in a later chunk.
    # NOTE(review): relies on a module-level `channels` dict mapping int
    # channel ids to lists, and only recognizes single-character channel
    # ids (the `line[1] == ':'` test) — confirm against the writers.
    start = time.time()
    accumulator = ''
    if desired_channel not in channels:
        channels[desired_channel] = []
    while not channels[desired_channel] and time.time() - start < timeout:
        try:
            data = stream.get_nowait()
            data = to_str(data['data']).split('\n')
            # First fragment completes any partial line from the last read.
            accumulator += data.pop(0)
            if data:
                # At least one newline was seen: the completed first line
                # goes back into `data`; the final (possibly partial)
                # fragment becomes the new accumulator.
                data.insert(0, accumulator)
                accumulator = data.pop()
                for line in data:
                    if len(line) > 1 and line[1] == ':':
                        # partition(':')[::2] -> (channel, text).
                        channel, string = line.partition(':')[::2]
                        channels[int(channel)].append(string)
        except Empty:
            yield tornado_sleep(0.1)
    if channels[desired_channel]:
        raise tornado.gen.Return(channels[desired_channel].pop(0))
    raise TimeoutException('Timeout reading queue')
def __call__(self, data):
    """Dump the payload straight to stdout and flush immediately."""
    text = to_str(data['data'])
    sys.stdout.write(text)
    sys.stdout.flush()
def split_data(data):
    """Split an event tuple into (watcher, action, msg)."""
    topic, msg = data
    parts = to_str(topic).split(".")
    watcher = parts[1]
    action = parts[2]
    return watcher, action, msg