def do_POST(self):
    """Accept a datapoint, event, or trace upload and stash it in the
    module-level accumulators (``datapoints``, ``events``, ``spans``).

    Bodies may be protobuf or JSON (chosen by Content-Type) and may be
    gzip-compressed (Content-Encoding). Unknown paths get a 404;
    successful ingestion returns 200 with a tiny ``"OK"`` body.
    """
    print("INGEST POST: %s" % self.path)
    # `or 0` / default "": a request missing Content-Length or
    # Content-Type used to raise TypeError instead of being handled.
    body = self.rfile.read(int(self.headers.get("Content-Length") or 0))
    is_json = "application/json" in self.headers.get("Content-Type", "")
    if "gzip" in self.headers.get("Content-Encoding", ""):
        body = gzip.decompress(body)

    if "datapoint" in self.path:
        dp_upload = sf_pbuf.DataPointUploadMessage()
        if is_json:
            json_format.Parse(body, dp_upload)
        else:
            dp_upload.ParseFromString(body)
        datapoints.extend(dp_upload.datapoints)  # pylint: disable=no-member
    elif "event" in self.path:
        event_upload = sf_pbuf.EventUploadMessage()
        if is_json:
            json_format.Parse(body, event_upload)
        else:
            event_upload.ParseFromString(body)
        events.extend(event_upload.events)  # pylint: disable=no-member
    elif "trace" in self.path:
        # Traces are always plain JSON lists of spans.
        spans.extend(json.loads(body))
    else:
        self.send_response(404)
        self.end_headers()
        return

    self.send_response(200)
    self.send_header("Content-Type", "text/ascii")
    self.send_header("Content-Length", "4")
    self.end_headers()
    self.wfile.write('"OK"'.encode("utf-8"))
def do_POST(self):
    """Accept a datapoint or event upload and append it to the module-level
    ``datapoints``/``events`` accumulators.

    Bodies may be protobuf or JSON (chosen by Content-Type) and may be
    gzip-compressed (Content-Encoding). Unknown paths get a 404;
    successful ingestion returns 200 with a tiny ``"OK"`` body.
    """
    # `or 0` / default "": a request missing Content-Length or
    # Content-Type used to raise TypeError instead of being handled.
    body = self.rfile.read(int(self.headers.get('Content-Length') or 0))
    is_json = "application/json" in self.headers.get("Content-Type", "")
    if "gzip" in self.headers.get("Content-Encoding", ""):
        body = gzip.decompress(body)

    if 'datapoint' in self.path:
        dp_upload = sf_pbuf.DataPointUploadMessage()
        if is_json:
            json_format.Parse(body, dp_upload)
        else:
            dp_upload.ParseFromString(body)
        datapoints.extend(dp_upload.datapoints)
    elif 'event' in self.path:
        event_upload = sf_pbuf.EventUploadMessage()
        if is_json:
            json_format.Parse(body, event_upload)
        else:
            event_upload.ParseFromString(body)
        events.extend(event_upload.events)
    else:
        self.send_response(404)
        self.end_headers()
        return

    self.send_response(200)
    self.send_header("Content-Type", "text/ascii")
    self.send_header("Content-Length", "4")
    self.end_headers()
    self.wfile.write("\"OK\"".encode("utf-8"))
def datapoints(self):
    """Retrieve the datapoints accumulated by the fake ingest service.

    Issues a GET against the fake ingest's ``/datapoints`` endpoint and
    decodes the protobuf payload into the repeated ``datapoints`` field.
    """
    raw = requests.get(self.local_url + "/datapoints")
    upload = sf_pbuf.DataPointUploadMessage()
    upload.ParseFromString(raw.content)
    return upload.datapoints
async def handle_datapoints(request):
    """Async handler: parse an uploaded DataPointUploadMessage (JSON or
    protobuf, chosen by content-type) and push it onto ``datapoint_queue``.

    Returns a JSON ``"OK"`` response on success.
    """
    # `or ""`: a request with no content-type header used to raise
    # TypeError on the `in` test instead of falling back to protobuf.
    is_json = "application/json" in (request.headers.get("content-type") or "")
    dp_upload = sf_pbuf.DataPointUploadMessage()
    if is_json:
        json_format.Parse(request.body, dp_upload)
    else:
        dp_upload.ParseFromString(request.body)
    datapoint_queue.put(dp_upload)
    return response.json("OK")
def test_forwarder_datapoints_protobuf():
    """Verify the signalfx-forwarder monitor ingests protobuf datapoints.

    Starts an agent with the forwarder listening on a random local port,
    POSTs TEST_DATAPOINTS as a serialized DataPointUploadMessage, and
    waits for the expected env=prod / env=dev datapoints to arrive at the
    fake backend.
    """
    port = random.randint(5001, 20000)
    with Agent.run(
        dedent(f"""
            hostname: "testhost"
            monitors:
              - type: signalfx-forwarder
                listenAddress: localhost:{port}
        """)
    ) as agent:
        assert wait_for(p(tcp_port_open_locally, port)), "datapoint forwarder port never opened!"

        # Build one upload message containing every test datapoint.
        upload = sf_pbuf.DataPointUploadMessage()
        for metric_type, dps in TEST_DATAPOINTS.items():
            for dp in dps:
                proto_dp = sf_pbuf.DataPoint()
                proto_dp.metricType = getattr(sf_pbuf, metric_type.upper())
                proto_dp.value.intValue = dp["value"]
                proto_dp.metric = dp["metric"]
                for dim_key, dim_value in dp.get("dimensions", {}).items():
                    dim = proto_dp.dimensions.add()
                    dim.key = dim_key
                    dim.value = dim_value
                upload.datapoints.extend([proto_dp])

        resp = requests.post(
            f"http://localhost:{port}/v2/datapoint",
            headers={"Content-Type": "application/x-protobuf"},
            data=upload.SerializeToString(),
        )
        assert resp.status_code == 200, f"Bad response: {resp.content}"

        assert wait_for(
            p(has_datapoint, agent.fake_services, dimensions={"env": "prod"})
        ), "Didn't get datapoint with env=prod"
        assert wait_for(
            p(has_datapoint, agent.fake_services, dimensions={"env": "dev"})
        ), "Didn't get datapoint with env=dev"
def do_GET(self):
    """Serve back the accumulated datapoints or events as a serialized
    protobuf upload message; any other path gets a 404.
    """
    if 'datapoint' in self.path:
        msg = sf_pbuf.DataPointUploadMessage()
        msg.datapoints.extend(datapoints)
    elif 'event' in self.path:
        msg = sf_pbuf.EventUploadMessage()
        msg.events.extend(events)
    else:
        self.send_response(404)
        self.end_headers()
        return

    payload = msg.SerializeToString()
    self.send_response(200)
    self.send_header("Content-Type", "application/octet-stream")
    self.send_header("Content-Length", len(payload))
    self.end_headers()
    self.wfile.write(payload)