def _deserialize_event(self, log_d):
    """Rebuild a Zentral event from a Datadog log search result."""
    attributes = log_d["attributes"]
    metadata = attributes["attributes"]
    # tags: recover the Zentral tags embedded in the Datadog tags
    prefix = "ztl-tag:"
    for tag in attributes.get("tags", []):
        if tag.startswith(prefix):
            metadata.setdefault("tags", []).append(tag[len(prefix):])
    # created_at
    metadata["created_at"] = metadata.pop("@timestamp")
    # extra attributes to cleanup
    for extra_attr in ("service", "host"):
        metadata.pop(extra_attr, None)
    # request
    request = metadata.setdefault("request", {})
    user_agent = metadata.pop("http", {}).get("useragent")
    if user_agent:
        request["user_agent"] = user_agent
    client_ip = metadata.pop("network", {}).get("client", {}).get("ip")
    if client_ip:
        request["ip"] = client_ip
    usr = metadata.pop("usr", None)
    if usr:
        # map the Datadog user attributes back to the Zentral ones
        for dd_attr, ztl_attr in (("id", "id"), ("email", "email"), ("name", "username")):
            val = usr.get(dd_attr)
            if val:
                request.setdefault("user", {})[ztl_attr] = val
    # the real event content
    event_type = metadata.pop("logger")["name"]
    metadata["type"] = event_type
    namespace = metadata.get("namespace", event_type)
    event_d = metadata.pop(namespace)
    event_d["_zentral"] = metadata
    return event_from_event_d(event_d)
def _deserialize_event(self, result):
    """Rebuild a Zentral event from a Splunk search result."""
    metadata = json.loads(result["_raw"])
    # extract id and index from the combined id field
    try:
        metadata["id"], event_index = metadata["id"].split(":")
        metadata["index"] = int(event_index)
    except ValueError:
        # legacy event?
        pass
    # normalize serial number
    if self.serial_number_field in metadata:
        metadata["machine_serial_number"] = metadata.pop(self.serial_number_field)
    # drop custom host field
    if self.custom_host_field:
        metadata.pop(self.custom_host_field, None)
    # add created at
    metadata["created_at"] = result["_time"]
    # event type
    event_type = result["sourcetype"]
    metadata["type"] = event_type
    # event data
    namespace = metadata.get("namespace", event_type)
    event_d = metadata.pop(namespace)
    event_d["_zentral"] = metadata
    return event_from_event_d(event_d)
def test_event_probes_with_probe_incident(self):
    """A matching probe is attached to the event, and the probe and munki
    incident updates survive an event serialize/deserialize round trip."""
    event = event_from_event_d(serialized_event)
    # attach the probe to the event, as the event pipeline would
    if self.probe_with_incident.test_event(event):
        event.metadata.add_probe(self.probe_with_incident)
    expected_serialized_probes = [{
        "pk": self.probe_with_incident.pk,
        "name": self.probe_with_incident.name
    }]
    # one update from the event payload, one from the matching probe
    expected_incident_updates = [
        IncidentUpdate("munki_reinstall", {
            "munki_pkginfo_name": "SuperApp",
            "munki_pkginfo_version": "0.1.0"
        }, Severity.CRITICAL),
        IncidentUpdate(
            "probe", {"probe_pk": self.probe_with_incident.pk},
            Severity(self.probe_with_incident.incident_severity))
    ]
    self.assertEqual(event.metadata.probes, expected_serialized_probes)
    self.assertEqual(event.metadata.incident_updates, expected_incident_updates)
    # round trip: serialize the updated event …
    serialized_updated_event = event.serialize()
    self.assertEqual(serialized_updated_event["_zentral"]["probes"],
                     expected_serialized_probes)
    self.assertEqual(
        serialized_updated_event["_zentral"]["incident_updates"],
        [{
            "incident_type": "munki_reinstall",
            "key": {
                "munki_pkginfo_name": "SuperApp",
                "munki_pkginfo_version": "0.1.0"
            },
            "severity": 300
        }, {
            "incident_type": "probe",
            "key": {
                "probe_pk": self.probe_with_incident.pk
            },
            "severity": 300
        }])
    # … and rebuild it from the serialized form
    updated_event = event_from_event_d(serialized_updated_event)
    self.assertEqual(list(updated_event.metadata.iter_loaded_probes()),
                     [self.probe_with_incident])
    self.assertEqual(updated_event.metadata.incident_updates,
                     expected_incident_updates)
def _deserialize_event(self, es_doc_type, es_event_d):
    """Turn an Elasticsearch document back into a Zentral event.

    With the "_doc" or legacy doc types, the event type is read from the
    document itself; otherwise the doc type is the event type.
    """
    if es_doc_type in ("_doc", self.LEGACY_DOC_TYPE):
        event_type = es_event_d["type"]
    else:
        event_type = es_doc_type
        es_event_d["type"] = event_type
    payload = es_event_d.pop(event_type)
    # everything left in the document is the event metadata
    payload['_zentral'] = es_event_d
    return event_from_event_d(payload)
def store_event(self, body, message):
    """Store the event, re-publish it for the event processors, ack the message."""
    from zentral.core.events import event_from_event_d
    self.event_store.store(event_from_event_d(body))
    # forward the raw payload to the processing exchange
    with producers[self.connection].acquire(block=True) as producer:
        producer.publish(body,
                         serializer='json',
                         exchange=process_events_exchange,
                         declare=[process_events_exchange])
    message.ack()
def _deserialize_event(self, es_doc_type, es_event_d):
    """Turn an Elasticsearch document back into a Zentral event."""
    if es_doc_type != self.DOC_TYPE:
        # the doc type carries the event type
        event_type = es_doc_type
        es_event_d["type"] = event_type
    else:
        # TODO VERIFY self.use_mapping_types == False
        event_type = es_event_d["type"]
    payload = es_event_d.pop(event_type)
    payload['_zentral'] = es_event_d
    return event_from_event_d(payload)
def store(self, event):
    """Index the event in Elasticsearch; failures are logged, not raised."""
    self.wait_and_configure_if_necessary()
    if isinstance(event, dict):
        event = event_from_event_d(event)
    doc_type, doc = self._serialize_event(event)
    try:
        self._es.index(index=self.index, doc_type=doc_type, body=doc)
        if self.test:
            # make the event immediately searchable in test mode
            self._es.indices.refresh(self.index)
    except Exception:
        logger.exception('Could not add event to elasticsearch index')
def store(self, event):
    """Index the event in Elasticsearch; failures are logged, not raised.

    In test mode the index is refreshed so the event is immediately
    searchable.
    """
    self.wait_and_configure_if_necessary()
    if isinstance(event, dict):
        event = event_from_event_d(event)
    doc_type, body = self._serialize_event(event)
    try:
        self._es.index(index=self.index, doc_type=doc_type, body=body)
        if self.test:
            self._es.indices.refresh(self.index)
    except Exception:
        # was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception (same pattern as the
        # other store() implementations in this code base)
        logger.exception('Could not add event to elasticsearch index')
def store(self, event):
    """Index the event in Elasticsearch."""
    self.wait_and_configure_if_necessary()
    if isinstance(event, dict):
        event = event_from_event_d(event)
    doc_type, body = self._serialize_event(event)
    index_kwargs = {"index": self.index, "body": body}
    if self.version < [7]:
        # doc types only exist before Elasticsearch 7
        index_kwargs["doc_type"] = doc_type
    self._es.index(**index_kwargs)
    if self.test:
        # make the event immediately searchable in test mode
        self._es.indices.refresh(self.index)
def store(self, event):
    """Insert the event into the events table, in its own transaction."""
    self.wait_and_configure_if_necessary()
    if isinstance(event, dict):
        event = event_from_event_d(event)
    query = ("insert into events (machine_serial_number, "
             "event_type, uuid, index, user_agent, ip, payload, created_at) "
             "values (%(machine_serial_number)s, %(event_type)s, "
             "%(uuid)s, %(index)s, %(user_agent)s, %(ip)s, %(payload)s, %(created_at)s)")
    with self._conn:
        doc = self._serialize_event(event)
        with self._conn.cursor() as cur:
            cur.execute(query, doc)
def store(self, event):
    """POST the event to the log collection API, with a signed request."""
    if isinstance(event, dict):
        event = event_from_event_d(event)
    payload = json.dumps(self._prepare_event(event)).encode("utf-8")
    # the signature covers the RFC 1123 request date and the content length
    request_date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
    signature = self._build_signature(request_date, len(payload))
    self._session.headers.update({
        'Authorization': "SharedKey {}:{}".format(self.customer_id,
                                                  signature.decode("utf-8")),
        'x-ms-date': request_date,
    })
    response = self._session.post(self._url, data=payload)
    response.raise_for_status()
def store(self, event):
    """Index the event in Elasticsearch; failures are logged, not raised."""
    self.wait_and_configure_if_necessary()
    if isinstance(event, dict):
        event = event_from_event_d(event)
    doc_type, body = self._serialize_event(event)
    index_kwargs = {"body": body}
    if self.version < [7]:
        # doc types only exist before Elasticsearch 7
        index_kwargs["doc_type"] = doc_type
    try:
        self._es.index(index=self.index, **index_kwargs)
        if self.test:
            # make the event immediately searchable in test mode
            self._es.indices.refresh(self.index)
    except Exception:
        logger.exception('Could not add event to elasticsearch index')
def test_event_from_event_d(self):
    """event_from_event_d restores the type, incident updates and linked objects."""
    event = event_from_event_d(serialized_event)
    # event type
    self.assertEqual(event.metadata.event_type, "inventory_heartbeat")
    # incident updates
    expected_incident_update = IncidentUpdate(
        "munki_reinstall",
        {"munki_pkginfo_name": "SuperApp",
         "munki_pkginfo_version": "0.1.0"},
        Severity.CRITICAL
    )
    self.assertEqual(len(event.metadata.incident_updates), 1)
    self.assertEqual(event.metadata.incident_updates[0], expected_incident_update)
    # linked objects
    expected_objects = {
        "yolo": [["17", "42"], ["11"]],
        "machine_incident": [["42"]]
    }
    self.assertEqual(event.metadata.objects, expected_objects)
def process_event(self, body, message):
    """Deserialize the message body, run it through the processor, ack."""
    from zentral.core.events import event_from_event_d
    self.event_processor.process(event_from_event_d(body))
    message.ack()
def _get_job(self, job_q, worker_q):
    """Atomically claim the next event id from the job queue and load its event."""
    from zentral.core.events import event_from_event_d
    # move the id to the worker queue so it is not lost if we crash
    event_id = self._r.brpoplpush(job_q, worker_q)
    raw_event = self._r.get(event_id)
    event = event_from_event_d(json.loads(raw_event.decode("utf-8")))
    return event_id, event
def _deserialize_event(self, event_type, es_event_d):
    """Rebuild a Zentral event from an Elasticsearch document."""
    payload = es_event_d.pop(event_type)
    # everything left in the document is the event metadata
    es_event_d['type'] = event_type
    payload['_zentral'] = es_event_d
    return event_from_event_d(payload)
def _deserialize_event(self, event_d):
    """Rebuild a Zentral event, restoring the '_zentral' metadata key."""
    metadata = event_d.pop('zzzentral')
    event_d['_zentral'] = metadata
    return event_from_event_d(event_d)
def _deserialize_event(self, event_type, es_event_d):
    """Turn an Elasticsearch document back into a Zentral event."""
    # the event payload is stored under its type; the rest is metadata
    payload = es_event_d.pop(event_type)
    payload['_zentral'] = es_event_d
    payload['_zentral']['type'] = event_type
    return event_from_event_d(payload)