def _json_to_message(self, json_data):
    """Rebuild a Message from its JSON dict form; other JSON values pass through unchanged."""
    if isinstance(json_data, dict) and 'uuid' in json_data:
        msg = Message()
        msg.uuid = base64.b64decode(str(json_data['uuid']))
        msg.timestamp = json_data['timestamp']
        msg.type = json_data['type']
        msg.logger = json_data['logger']
        msg.severity = json_data['severity']
        msg.payload = json_data['payload']
        msg.env_version = json_data['env_version']
        msg.pid = json_data.get('pid', 0)
        msg.hostname = json_data.get('hostname', '')

        for field_dict in json_data.get('fields', []):
            f = msg.fields.add()
            f.value_type = PB_NAMETYPE_TO_INT[field_dict['value_type']]
            # Everything is raw
            f.representation = ""
            del field_dict['value_type']
            del field_dict['representation']
            for k, v in field_dict.items():
                cls_name = getattr(f, k).__class__.__name__
                if cls_name == 'RepeatedScalarFieldContainer':
                    for v1 in v:
                        getattr(f, k).append(v1)
                else:
                    setattr(f, k, v)
        return msg
    return json_data

def test_filters_config():
    cfg_txt = """
    [heka]
    stream_class = heka.streams.DebugCaptureStream

    [heka_filter_sev_max]
    provider = heka.filters.severity_max_provider
    severity = 6

    [heka_filter_type_whitelist]
    provider = heka.filters.type_whitelist_provider
    types = foo
            bar
            baz
    """
    client = client_from_text_config(cfg_txt, 'heka')
    eq_(len(client.filters), 2)

    severity_max = [x for x in client.filters
                    if x.func_name == 'severity_max']
    eq_(len(severity_max), 1)
    severity_max = severity_max[0]
    eq_(severity_max.func_name, 'severity_max')
    msg = Message(severity=6)
    ok_(severity_max(msg))
    msg = Message(severity=7)
    ok_(not severity_max(msg))

    type_whitelist = [
        x for x in client.filters if x.func_name == 'type_whitelist'
    ]
    eq_(len(type_whitelist), 1)
    type_whitelist = type_whitelist[0]
    eq_(type_whitelist.func_name, 'type_whitelist')
    msg = Message(type='bar')
    ok_(type_whitelist(msg))
    msg = Message(type='bawlp')
    ok_(not type_whitelist(msg))

def test_protobuf_encoding(self):
    enc = ProtobufEncoder()
    bytes = enc.encode(SAMPLE_MSG)
    eq_(ord(bytes[0]), RECORD_SEPARATOR)
    header_len = ord(bytes[1])
    header = bytes[2:2 + header_len]

    # Now double check the header
    h = Header()
    h.ParseFromString(header)
    eq_(h.message_encoding,
        Header.MessageEncoding.Value('PROTOCOL_BUFFER'))

    eq_(ord(bytes[header_len + 2]), UNIT_SEPARATOR)

    pb_data = bytes[header_len + 3:]
    eq_(len(pb_data), h.message_length)

    msg = Message()
    msg.ParseFromString(pb_data)
    eq_(msg.uuid, SAMPLE_MSG.uuid)
    eq_(msg.timestamp, SAMPLE_MSG.timestamp)
    eq_(msg.payload, SAMPLE_MSG.payload)

    # Check the 4 flattened fields
    eq_(len(msg.fields), 4)
    eq_(first_value(msg, 'foo'), 'bar')
    eq_(first_value(msg, 'blah'), 42)
    eq_(first_value(msg, 'cef_meta.syslog_name'), 'some-syslog-thing')
    eq_(first_value(msg, 'cef_meta.syslog_level'), 5)

def check_bytes(self, bytes, value):
    new_msg = Message()
    new_msg.ParseFromString(bytes)
    assert new_msg.uuid == '0123456789012345'
    assert new_msg.type == 'demo'
    assert new_msg.timestamp == 1000000
    assert first_value(new_msg, 'myfield') == value

def decode_message(bytes):
    """
    Decode the header and message object from raw bytes
    """
    # bytes[0] is the RECORD_SEPARATOR, bytes[1] holds the header length
    header_len = ord(bytes[1])
    header = bytes[2:2 + header_len]

    # Now double check the header
    h = Header()
    h.ParseFromString(header)

    # Skip the UNIT_SEPARATOR that sits between the header and the message
    pb_data = bytes[header_len + 3:]
    msg = Message()
    msg.ParseFromString(pb_data)

    return h, msg

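# Usage sketch (not part of the original suite): assuming the ProtobufEncoder
# and SAMPLE_MSG fixtures used by the encoding tests above, decode_message()
# should invert encode(), which frames the payload as RECORD_SEPARATOR,
# header length, header, UNIT_SEPARATOR, then the serialized Message.
# The helper name _roundtrip_example is hypothetical.
def _roundtrip_example():
    enc = ProtobufEncoder()
    framed = enc.encode(SAMPLE_MSG)
    header, msg = decode_message(framed)
    # message_length covers only the protobuf payload after the framing bytes
    assert header.message_length == len(framed) - (ord(framed[1]) + 3)
    assert msg.uuid == SAMPLE_MSG.uuid
    assert msg.payload == SAMPLE_MSG.payload
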
def setUp(self):
    msg = Message()
    msg.timestamp = int(time.time() * 1000000)
    msg.type = 'sentry'
    msg.logger = ''
    msg.severity = 3
    msg.payload = 'some_data'
    msg.env_version = '0.8'
    msg.pid = 55
    msg.hostname = 'localhost'

    client = HekaClient(None, None)
    client._flatten_fields(msg, {'foo': 'bar', 'blah': 42,
                                 'cef_meta': {'syslog_name': 'some-syslog-thing',
                                              'syslog_level': 5}})
    msg.uuid = str(uuid.uuid5(uuid.NAMESPACE_OID, str(msg)))
    self.msg = msg

def dict_to_msg(py_data):
    """
    Encode a python dictionary into a ProtocolBuffer Message object.

    This is only useful for testing.
    """
    msg = Message()
    msg.uuid = py_data.get('uuid',
                           uuid.uuid5(uuid.NAMESPACE_OID, str(py_data)).bytes)
    if len(msg.uuid) != UUID_SIZE:
        raise InvalidMessage("UUID must be 16 bytes long")

    msg.timestamp = py_data.get('timestamp', int(time.time() * 1000000))
    msg.type = py_data['type']
    msg.logger = py_data['logger']
    msg.severity = py_data['severity']
    msg.payload = py_data['payload']
    msg.env_version = py_data['env_version']
    msg.pid = py_data['heka_pid']
    msg.hostname = py_data['heka_hostname']

    _flatten_fields(msg, py_data['fields'])
    return msg

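# Illustrative sketch (hypothetical values): a minimal dictionary that
# dict_to_msg() accepts, based only on the keys read above. 'uuid' and
# 'timestamp' are optional and are generated when omitted.
EXAMPLE_PY_DATA = {
    'type': 'demo',
    'logger': 'tests',
    'severity': 6,
    'payload': 'hello world',
    'env_version': '0.8',
    'heka_pid': 1234,
    'heka_hostname': 'localhost',
    'fields': {'foo': 'bar'},
}
# msg = dict_to_msg(EXAMPLE_PY_DATA)
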
def test_bytes_field(self):
    value = 'some_bytes'
    enc = ProtobufEncoder()

    msg = Message(uuid='0123456789012345', type='demo', timestamp=1000000)
    f = msg.fields.add()
    f.name = 'myfield'
    f.representation = ""
    f.value_type = Field.BYTES
    f.value_bytes.append(value)

    bytes = msg.SerializeToString()

    name = 'Bytes'
    print "\n\n\n"
    print "# %s Bytes: %d" % (name, len(bytes))
    print "%s Bytes: %s" % (name, ":".join("%02x" % ord(c) for c in bytes))
    print "\n\n"

    self.check_bytes(bytes, value)

def build_msg(self, value):
    msg = Message(uuid='0123456789012345', type='demo', timestamp=1000000)
    f = msg.fields.add()
    f.name = 'myfield'
    f.representation = ""

    if isinstance(value, types.BooleanType):
        f.value_type = Field.BOOL
        f.value_bool.append(bool(value))
    elif isinstance(value, types.IntType):
        f.value_type = Field.INTEGER
        f.value_integer.append(value)
    elif isinstance(value, types.FloatType):
        f.value_type = Field.DOUBLE
        f.value_double.append(value)
    elif isinstance(value, basestring):
        f.value_type = Field.STRING
        f.value_string.append(value)

    return f.value_type, msg

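# Usage sketch (hypothetical test method, not in the original class):
# build_msg() and check_bytes() are presumably paired per scalar type;
# the integer value below is arbitrary.
def _example_int_roundtrip(self):
    value_type, msg = self.build_msg(42)
    assert value_type == Field.INTEGER
    self.check_bytes(msg.SerializeToString(), 42)
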
def decode(self, bytes):
    msg = Message()
    msg.ParseFromString(bytes)
    return msg

def setUp(self):
    msg = Message()
    msg.timestamp = int(time.time() * 1000000)
    msg.type = 'sentry'
    msg.logger = ''
    msg.severity = 3
    msg.payload = 'some_data'
    msg.env_version = '0.8'
    msg.pid = 55
    msg.hostname = 'localhost'

    client = HekaClient(None, None)
    client._flatten_fields(
        msg, {
            'foo': 'bar',
            'blah': 42,
            'cef_meta': {
                'syslog_name': 'some-syslog-thing',
                'syslog_level': 5
            }
        })
    msg.uuid = str(uuid.uuid5(uuid.NAMESPACE_OID, str(msg)))
    self.msg = msg

def setup(self):
    self.msg = Message(uuid='0123456789012345', type='hmac',
                       timestamp=1000000)