def getHTML(self):
    """Render the current plot state as an inline HTML <img> element.

    Returns '' while the plotter is disabled, a placeholder <span> when no
    data/curves exist yet, and otherwise an <img> tag whose src is a
    base64 data URI of an SVG rendered by GR into ``self.tempfile``.
    """
    if not self.enabled:
        return ''
    if not self.data or not self.curves:
        return u'<span>No data or curves found</span>'
    with self.lock:
        for i, (d, c) in enumerate(zip(self.data, self.curves)):
            try:
                # add a point "current value" at "right now" to avoid curves
                # not updating if the value doesn't change
                now = currenttime()
                if d[0][-1] < now - 10:
                    self.updatevalues(i, now, d[1][-1])
                c.x, c.y = self.maybeDownsamplePlotdata(d)
            except IndexError:
                # no data (yet)
                pass
        # fit the axes window to the currently plotted curve extents
        c = self.axes.getCurves()
        self.axes.setWindow(c.xmin, c.xmax, c.ymin, c.ymax)
        # drop a leftover output file before printing a fresh one
        if os.path.isfile(self.tempfile):
            os.unlink(self.tempfile)
        gr.beginprint(self.tempfile)
        # NOTE(review): 0.0022 appears to map the ex-based width/height to
        # GR's workstation viewport units — presumably an empirical scale
        gr.setwsviewport(0, self.width * 0.0022, 0, self.height * 0.0022)
        try:
            self.plot.drawGR()
        finally:
            # always close the print workstation, even if drawing failed
            gr.endprint()
            gr.clearws()
        with open(self.tempfile, 'rb') as fp:
            imgbytes = fp.read()
        # embed the SVG bytes as a base64 data URI sized in ex units
        return (u'<img src="data:image/svg+xml;base64,%s" '
                'style="width: %sex; height: %sex">' %
                (from_utf8(b2a_base64(imgbytes)), self.width, self.height))
def _process_data(self, data,
                  sync_str=to_utf8(SYNC_MARKER + OP_TELLOLD),
                  lmatch=line_pattern.match, mmatch=msg_pattern.match):
    """Consume complete lines from *data* and dispatch each message.

    Sync markers flip ``self._synced``; every other valid line is fed to
    ``_handle_msg``.  Returns the unconsumed tail of *data* (a partial
    line, if any) so the caller can prepend it to the next chunk.
    """
    pos = 0  # scan by offset instead of slicing a copy per line
    nextline = lmatch(data, pos)
    while nextline:
        line = nextline.group(1)
        pos = nextline.end()
        if sync_str not in line:
            parsed = mmatch(from_utf8(line))
            # lines that do not parse as messages are silently dropped
            if parsed:
                try:
                    self._handle_msg(**parsed.groupdict())
                except Exception:
                    self.log.exception('error handling message %r',
                                       parsed.group())
        else:
            self.log.debug('process data: received sync: %r', line)
            self._synced = True
        nextline = lmatch(data, pos)
    return data[pos:]
def recv_command(self):
    """Block for the next queued command and return it deserialized.

    Raises CloseConnection after an hour without traffic, so the data
    associated with an idle client gets cleaned up.
    """
    try:
        entry = self.command_queue.get(timeout=3600.)
    except queue.Empty:
        # long stretch of inactivity -> "close connection"
        raise CloseConnection
    return self.serializer.deserialize_cmd(entry[4], from_utf8(entry[2]))
def get_event(self):
    """Receive one multipart event and return its (possibly decoded) payload."""
    frames = self.event_sock.recv_multipart()
    if len(frames) < 3:
        raise ProtocolError('invalid frames received')
    name = from_utf8(frames[1])
    # events flagged in DAEMON_EVENTS carry serialized payloads;
    # everything else is handed through as raw bytes
    if not DAEMON_EVENTS[name][0]:
        return frames[2]
    return self.serializer.deserialize_event(frames[2], frames[1])
def get_git_version(abbrev=4, cwd=None):
    """Return the ``git describe`` version (leading/trailing 'v' stripped).

    *cwd* defaults to the NICOS root directory.  Any failure — git not
    installed, not a repository, etc. — yields None.
    """
    cmd = ['git', 'describe', '--abbrev=%d' % abbrev]
    try:
        proc = Popen(cmd, cwd=cwd or config.nicos_root,
                     stdout=PIPE, stderr=PIPE)
        out, _ = proc.communicate()
        return from_utf8(out.strip()).strip('v')
    except Exception:
        return None
def test_shell(console):
    """End-to-end shell check: load a setup, trigger an error, read, quit."""
    console.stdin.write(b'NewSetup("axis")\n1/0\nread()\n')
    out, _ = console.communicate()
    lines = from_utf8(out).splitlines()

    def saw(exact=None, suffix=None):
        # exact: the full line must appear; suffix: some line must end with it
        if exact is not None:
            return exact in lines
        return any(line.endswith(suffix) for line in lines)

    assert saw(exact='nicos: setups loaded: startup')
    assert saw(exact='nicos: setups loaded: axis')
    assert saw(suffix='nicos: >>> 1/0')
    assert saw(exact='nicos: ZeroDivisionError - division by zero')
    assert saw(suffix='nicos: >>> read()')
    assert saw(exact='nicos: shutting down...')
def handle_entry(self, time, data):
    """Emit one log entry: timestamp, rendered body, then TOC entries."""
    self.out.timestamp(time)
    if not creole:
        # creole markup support unavailable: escape raw text, no headers
        body, headers = escape(data), []
    else:
        document = creole.Parser(from_utf8(data)).parse()
        emitter = creole.HtmlEmitter(document, self.out.new_id)
        body = emitter.emit()
        headers = emitter.headers
    self.out.newstate('entry', '', '', body)
    for level, text, targetid in headers:
        self.out.toc_entry(level, text, targetid)
def _process_data(self, data, reply_callback):
    """Split *data* into message lines, dispatch each, return leftovers.

    An empty line closes the connection; handler replies are passed to
    *reply_callback*.  The returned bytes are the trailing partial line.
    """
    while True:
        match = line_pattern.match(data)
        if not match:
            # only a partial line remains; caller re-feeds it later
            return data
        line = match.group(1)
        data = data[match.end():]
        if not line:
            # empty line: the peer wants the connection closed
            self.log.info('got empty line, closing connection')
            self.closedown()
            return b''
        try:
            replies = self._handle_line(from_utf8(line))
        except Exception as err:
            self.log.warning('error handling line %r', line, exc=err)
        else:
            for reply in replies:
                reply_callback(reply)
def cache_dump(obj):
    """Serialize *obj* into the cache's textual value representation.

    Simple types use repr(); lists/tuples/dicts/frozensets are rendered
    recursively (with trailing commas after every element); None becomes
    'None'; anything else is base64-pickled and wrapped in
    cache_unpickle("...").  Raises ValueError for unpicklable objects.
    """
    if isinstance(obj, repr_types):
        return repr(obj)
    if isinstance(obj, list):
        return '[%s]' % ''.join(cache_dump(entry) + ',' for entry in obj)
    if isinstance(obj, tuple):
        return '(%s)' % ''.join(cache_dump(entry) + ',' for entry in obj)
    if isinstance(obj, dict):
        body = ''.join(cache_dump(key) + ':' + cache_dump(value) + ','
                       for key, value in iteritems(obj))
        return '{%s}' % body
    if isinstance(obj, frozenset):
        return '{%s}' % ''.join(cache_dump(entry) + ',' for entry in obj)
    if obj is None:
        return 'None'
    # fallback: pickle with protocol 0 (ASCII) and base64-encode
    try:
        blob = from_utf8(b64encode(pickle.dumps(obj, protocol=0)))
    except Exception as err:
        raise ValueError('unserializable object: %r (%s)' % (obj, err))
    return 'cache_unpickle("' + blob + '")'
def _single_request(self, tosend, sentinel=b'\n', retry=2, sync=False):
    """Communicate over the secondary socket.

    Sends *tosend*, reads until *sentinel* is seen (10 s budget), and
    yields one ``msg_pattern`` match object per valid response line.
    On socket errors the secondary socket is torn down and the request
    is retried up to *retry* times before re-raising.

    Bug fix: the original parse loop executed ``continue`` on an invalid
    line without re-running ``lmatch``, so the stale match object was
    reprocessed forever — an infinite loop on any malformed line.  The
    match is now advanced before the validity check.
    """
    if not self._socket:
        self._disconnect('single request: no socket')
        if not self._socket:
            raise CacheError('cache not connected')

    if sync:
        # sync has to be false for lock requests, as these occur
        # during startup
        self._queue.join()

    with self._sec_lock:
        if not self._secsocket:
            try:
                self._secsocket = tcpSocket(self.cache, DEFAULT_CACHE_PORT)
            except Exception as err:
                self.log.warning('unable to connect secondary socket '
                                 'to %s: %s', self.cache, err)
                self._secsocket = None
                self._disconnect('secondary socket: could not connect')
                raise CacheError('secondary socket could not be created')

        try:
            # write request
            self._secsocket.sendall(to_utf8(tosend))

            # give 10 seconds time to get the whole reply
            timeout = currenttime() + 10
            # read response
            data = b''
            while not data.endswith(sentinel):
                newdata = self._secsocket.recv(BUFSIZE)  # blocking read
                if not newdata:
                    raise socket.error('cache closed connection')
                if currenttime() > timeout:
                    # do not just break, we need to reopen the socket
                    raise socket.error('getting response took too long')
                data += newdata
        except socket.error:
            self.log.warning('error during cache query', exc=1)
            closeSocket(self._secsocket)
            self._secsocket = None
            if retry:
                for m in self._single_request(tosend, sentinel, retry - 1):
                    yield m
                return
            raise

    # parse response lines; hoist the bound methods out of the loop
    lmatch = line_pattern.match
    mmatch = msg_pattern.match
    i = 0  # avoid making a string copy for every line
    match = lmatch(data, i)
    while match:
        line = match.group(1)
        i = match.end()
        # advance to the next line *before* filtering, so that an
        # invalid line cannot make this loop spin forever
        match = lmatch(data, i)
        msgmatch = mmatch(from_utf8(line))
        if msgmatch:
            yield msgmatch
        # else: ignore invalid lines