def init(self, init_req):
    """Handle the 'init' request: record the UDF options Kapacitor sent.

    Reads the 'order', 'field1' and 'field2' options into instance
    attributes and replies with an init response.  No validation is
    performed here, so success is always reported.
    """
    self._logfile.write("initting\n")
    msg = ''
    for opt in init_req.options:
        if opt.name == 'order':
            self._order = opt.values[0].intValue
        elif opt.name == 'field1':
            self._field1 = opt.values[0].stringValue
        elif opt.name == 'field2':
            self._field2 = opt.values[0].stringValue
    success = True
    # Any statistical state (e.g. a history window) would be initialized here.
    response = udf_pb2.Response()
    response.init.success = success
    response.init.error = msg
    self._logfile.write("returning init response\n\n")
    return response
def init(self, init_req):
    """Handle 'init': capture the 'field', 'size' and 'as' options and
    validate that all three were supplied with usable values."""
    for opt in init_req.options:
        name = opt.name
        if name == 'field':
            self._field = opt.values[0].stringValue
        elif name == 'size':
            self._size = opt.values[0].intValue
        elif name == 'as':
            self._as = opt.values[0].stringValue

    # Collect every validation failure; the reply carries them all at once.
    problems = []
    if self._field is None:
        problems.append('must supply field name')
    if self._size == 0:
        problems.append('must supply window size')
    if self._as == '':
        problems.append('invalid as name')

    response = udf_pb2.Response()
    response.init.success = not problems
    response.init.error = ' '.join(problems)
    return response
def begin_batch(self, begin_req):
    """Reset per-batch state and echo the begin-batch message to Kapacitor."""
    self._state.reset()
    # Kapacitor expects the begin message mirrored back unchanged.
    echo = udf_pb2.Response()
    echo.begin.CopyFrom(begin_req)
    self._agent.write_response(echo)
def _read_loop(self):
    """Agent main loop: read length-prefixed protobuf requests from the
    input stream and dispatch each to the handler until EOF or error.

    info/init/keepalive/snapshot/restore replies are flushed immediately;
    begin/point/end are data messages and the handler writes any
    responses itself.
    """
    request = udf_pb2.Request()
    while True:
        # Remember the message type for the error report below; 'unknown'
        # covers failures that occur before WhichOneof succeeds.
        msg = 'unknown'
        try:
            # Each request is a uvarint length prefix followed by that
            # many bytes of serialized Request protobuf.
            size = decodeUvarint32(self._in)
            data = self._in.read(size)
            request.ParseFromString(data)
            # use parsed message
            msg = request.WhichOneof("message")
            if msg == "info":
                response = self.handler.info()
                self.write_response(response, flush=True)
            elif msg == "init":
                response = self.handler.init(request.init)
                self.write_response(response, flush=True)
            elif msg == "keepalive":
                # Echo the keepalive timestamp so Kapacitor knows we're alive.
                response = udf_pb2.Response()
                response.keepalive.time = request.keepalive.time
                self.write_response(response, flush=True)
            elif msg == "snapshot":
                response = self.handler.snapshot()
                self.write_response(response, flush=True)
            elif msg == "restore":
                response = self.handler.restore(request.restore)
                self.write_response(response, flush=True)
            elif msg == "begin":
                self.handler.begin_batch(request.begin)
            elif msg == "point":
                self.handler.point(request.point)
            elif msg == "end":
                self.handler.end_batch(request.end)
            else:
                logger.error("received unhandled request %s", msg)
        except EOF:
            # Input stream closed: normal shutdown.
            break
        except Exception as e:
            # Report the failure back to Kapacitor, then stop the loop.
            traceback.print_exc()
            error = "error processing request of type %s: %s" % (msg, e)
            logger.error(error)
            response = udf_pb2.Response()
            response.error.error = error
            self.write_response(response)
            break
def info(self):
    """Describe this UDF: consumes and produces batches; accepts a
    string 'field' option and a double 'scale' option."""
    response = udf_pb2.Response()
    info = response.info
    info.wants = udf_pb2.BATCH
    info.provides = udf_pb2.BATCH
    info.options['field'].valueTypes.append(udf_pb2.STRING)
    info.options['scale'].valueTypes.append(udf_pb2.DOUBLE)
    logger.info("info")
    return response
def snapshot(self):
    """Serialize the per-group state as JSON for Kapacitor snapshotting.

    Returns a Response whose snapshot field holds a JSON object mapping
    group name -> that group's snapshot data.
    """
    # dict.items() instead of the Python-2-only iteritems(): this method
    # raises AttributeError under Python 3 otherwise, and items() iterates
    # the same pairs on both versions.
    data = {}
    for group, state in self._state.items():
        data[group] = state.snapshot()
    response = udf_pb2.Response()
    response.snapshot.snapshot = json.dumps(data)
    return response
def info(self):
    """Advertise a stream-in, stream-out UDF taking 'field' (string),
    'size' (int) and 'as' (string) options."""
    response = udf_pb2.Response()
    response.info.wants = udf_pb2.STREAM
    response.info.provides = udf_pb2.STREAM
    opts = response.info.options
    opts['field'].valueTypes.append(udf_pb2.STRING)
    opts['size'].valueTypes.append(udf_pb2.INT)
    opts['as'].valueTypes.append(udf_pb2.STRING)
    return response
def end_batch(self, end_req):
    """Emit one response per detected outlier point, then mirror the
    end-batch message back so Kapacitor knows the batch is complete."""
    for outlier in self._state.outliers(self._scale):
        reply = udf_pb2.Response()
        reply.point.CopyFrom(outlier)
        self._agent.write_response(reply)
    # Echo the end message unchanged to close the batch.
    reply = udf_pb2.Response()
    reply.end.CopyFrom(end_req)
    self._agent.write_response(reply)
def info(self):
    """Handle the 'info' request: declare a stream-in, stream-out UDF
    taking 'order' (int), 'field1' and 'field2' (string) options."""
    self._logfile.write("getting info\n")
    response = udf_pb2.Response()
    response.info.wants = udf_pb2.STREAM
    response.info.provides = udf_pb2.STREAM
    response.info.options['order'].valueTypes.append(udf_pb2.INT)
    response.info.options['field1'].valueTypes.append(udf_pb2.STRING)
    response.info.options['field2'].valueTypes.append(udf_pb2.STRING)
    self._logfile.write("returning info response\n\n")
    return response
def point(self, point):
    """Update the moving average for the point's group and emit a point
    carrying only the averaged value under the configured output name."""
    reply = udf_pb2.Response()
    reply.point.CopyFrom(point)
    # Strip every copied field; the output carries only the average.
    for field_set in ('fieldsInt', 'fieldsString', 'fieldsDouble'):
        reply.point.ClearField(field_set)

    group = point.group
    if group not in self._state:
        # First point seen for this group: create its window state.
        self._state[group] = AvgHandler.state(self._size)
    avg = self._state[group].update(point.fieldsDouble[self._field])

    reply.point.fieldsDouble[self._as] = avg
    self._agent.write_response(reply)
def restore(self, restore_req):
    """Rebuild per-group state from a JSON snapshot made by snapshot().

    Returns a restore Response; on any parse/apply failure success is
    False and the error field carries the exception text.
    """
    success = False
    msg = ''
    try:
        data = json.loads(restore_req.snapshot)
        # dict.items() instead of the Python-2-only iteritems(), which
        # does not exist under Python 3.
        for group, snapshot in data.items():
            # Size 0 is a placeholder; restore() supplies the real state.
            self._state[group] = AvgHandler.state(0)
            self._state[group].restore(snapshot)
        success = True
    except Exception as e:
        success = False
        msg = str(e)
    response = udf_pb2.Response()
    response.restore.success = success
    response.restore.error = msg
    return response
def init(self, init_req):
    """Handle 'init': capture the 'field', 'scale' and 'as' options and
    validate the field name and scale factor."""
    for opt in init_req.options:
        if opt.name == 'field':
            self._field = opt.values[0].stringValue
        elif opt.name == 'scale':
            self._scale = opt.values[0].doubleValue
        elif opt.name == 'as':
            self._as = opt.values[0].stringValue

    # Gather all validation failures into one error string.
    problems = []
    if self._field is None:
        problems.append('must supply field name')
    if self._scale < 1.0:
        problems.append('invalid scale must be >= 1.0')

    response = udf_pb2.Response()
    response.init.success = not problems
    response.init.error = ' '.join(problems)
    return response
def info(self):
    """Minimal info reply: a stream-in, stream-out UDF with no options."""
    reply = udf_pb2.Response()
    reply.info.wants = udf_pb2.STREAM
    reply.info.provides = udf_pb2.STREAM
    return reply
def _read_loop(self):
    """Agent main loop: read length-prefixed protobuf requests from the
    input stream and dispatch each to the handler until EOF or error.

    info/init/keepalive/snapshot/restore replies are flushed immediately;
    begin/point/end are data messages and the handler writes any
    responses itself.  Errors are logged and reported back to Kapacitor
    via an error Response before the loop exits.
    """
    request = udf_pb2.Request()
    while True:
        # 'unknown' covers failures that occur before WhichOneof succeeds.
        msg = 'unknown'
        try:
            # Each request is a uvarint length prefix followed by that
            # many bytes of serialized Request protobuf.
            size = decodeUvarint32(self._in)
            data = self._in.read(size)
            request.ParseFromString(data)
            msg = request.WhichOneof("message")
            if msg == "info":
                response = self.handler.info()
                self.write_response(response, flush=True)
            elif msg == "init":
                response = self.handler.init(request.init)
                self.write_response(response, flush=True)
            elif msg == "keepalive":
                # Echo the keepalive timestamp so Kapacitor knows we're alive.
                response = udf_pb2.Response()
                response.keepalive.time = request.keepalive.time
                self.write_response(response, flush=True)
            elif msg == "snapshot":
                response = self.handler.snapshot()
                self.write_response(response, flush=True)
            elif msg == "restore":
                response = self.handler.restore(request.restore)
                self.write_response(response, flush=True)
            elif msg == "begin":
                self.handler.begin_batch(request.begin)
            elif msg == "point":
                self.handler.point(request.point)
            elif msg == "end":
                self.handler.end_batch(request.end)
            else:
                logger.error("received unhandled request %s", msg)
        except EOF:
            # Input stream closed: normal shutdown.
            break
        except Exception as e:
            # Report the failure back to Kapacitor, then stop the loop.
            traceback.print_exc()
            error = "error processing request of type %s: %s" % (msg, e)
            logger.error(error)
            response = udf_pb2.Response()
            response.error.error = error
            self.write_response(response)
            break
def init(self, init_req):
    """Accept any init request unconditionally; this handler takes no
    options, so there is nothing to validate."""
    reply = udf_pb2.Response()
    reply.init.success = True
    return reply
def point(self, point):
    """Echo every incoming point straight back to Kapacitor."""
    reply = udf_pb2.Response()
    reply.point.CopyFrom(point)
    # Positional True is the flush flag: push the point out immediately.
    self._agent.write_response(reply, True)
def restore(self, restore_req):
    """Restores are unsupported by this handler; always report failure."""
    reply = udf_pb2.Response()
    reply.restore.success = False
    reply.restore.error = 'not implemented'
    return reply
def snapshot(self):
    """This handler keeps no state, so its snapshot is the empty string."""
    reply = udf_pb2.Response()
    reply.snapshot.snapshot = ''
    return reply