def build_file_carving_session_archive(session_id):
    """Assemble the carve blocks of a session into a tar archive and post an event.

    Args:
        session_id: primary key of the FileCarvingSession to archive.

    The function is idempotent: it bails out with an error log if the
    session already has an archive.
    """
    # get the carve session
    file_carving_session = (
        FileCarvingSession.objects
        .select_related("distributed_query", "pack_query")
        .get(pk=session_id)
    )
    if file_carving_session.archive:
        logger.error("Archive already exists for session %s", session_id)
        return
    # build archive file from carve blocks
    archive_size = 0
    tmp_fh, tmp_path = tempfile.mkstemp(
        suffix="_osquery_file_carving_archive.tar")
    logger.info("Start building archive %s %s", session_id, tmp_path)
    try:
        with os.fdopen(tmp_fh, "wb") as f:
            # blocks must be concatenated in block_id order to rebuild the tar
            for file_carving_block in (file_carving_session.filecarvingblock_set
                                       .all().order_by("block_id")):
                for chunk in file_carving_block.file.chunks():
                    f.write(chunk)
                    archive_size += len(chunk)
        with open(tmp_path, "rb") as f:
            file_carving_session.archive.save("archive.tar", File(f))
    finally:
        # FIX: always remove the temporary file, even if building or saving
        # the archive raises — it was leaked on error before.
        os.unlink(tmp_path)
    # post osquery file carve event
    event_cls = event_cls_from_type("osquery_file_carving")
    event_cls.post_machine_request_payloads(
        file_carving_session.serial_number,
        None, None,
        [{"session_id": session_id,
          "action": "archive",
          "archive": {"name": file_carving_session.get_archive_name(),
                      "size": archive_size,
                      "url": file_carving_session.get_archive_url()}}])
def process_raw_event(self, raw_event):
    """Process a raw puppet report: update the machine, then yield the report event.

    Generator: yields the machine update events first, then the puppet
    report event itself (with the bulky sections stripped).
    """
    instance_d = raw_event["puppet_instance"]
    client = self.get_client(instance_d)
    event_type = raw_event["event_type"]
    try:
        # SECURITY(review): yaml.load without an explicit Loader can run
        # arbitrary constructors on untrusted input (and requires a Loader
        # argument in PyYAML >= 6). Prefer yaml.safe_load if the reports
        # only use plain YAML tags — TODO confirm with a sample report.
        puppet_report = yaml.load(raw_event["puppet_report"])
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate instead of being swallowed.
        logger.exception("Could not read puppet report")
        return
    certname = puppet_report["host"]
    try:
        machine_d = client.get_machine_d(certname)
    except Exception:
        # FIX: narrowed from a bare `except:` (same reason as above).
        logger.exception("Could not get machine_d. %s %s",
                         client.get_source_d(), certname)
        return
    serial_number = machine_d["serial_number"]
    yield from self.update_machine(machine_d)
    # yield puppet event, without the large report sections
    puppet_report.pop("logs")
    puppet_report.pop("metrics")
    puppet_report.pop("resource_statuses")
    event_cls = event_cls_from_type(event_type)
    yield from event_cls.build_from_machine_request_payloads(
        serial_number,
        raw_event["request"]["user_agent"],
        raw_event["request"]["ip"],
        [puppet_report],
        get_created_at=get_report_created_at)
def process_raw_event(self, raw_event):
    """Preprocess a raw Workspace ONE event.

    Generator: yields the machine update events (when a DeviceId is
    present) followed by the translated Workspace ONE event, if the
    event type is known.
    """
    instance_d = raw_event["wsone_instance"]
    client = self._get_client(instance_d)
    if client is None:
        return
    # event from an excluded group?
    wsone_event = raw_event["wsone_event"]
    if client.is_excluded_event(wsone_event):
        return
    # update device if possible
    device_id = wsone_event.pop("DeviceId", None)
    if device_id:
        yield from self._update_machine(client, device_id)
    else:
        logger.warning("Workspace ONE event without DeviceId")
    # yield wsone event
    # FIX: use a default so a missing SerialNumber hits the error branch
    # below instead of raising an unhandled KeyError.
    serial_number = wsone_event.pop("SerialNumber", None)
    if not serial_number:
        logger.error("Workspace ONE event without SerialNumber")
        return
    wsone_event_type = wsone_event.pop("EventType", None)
    if not wsone_event_type:
        logger.error("Workspace ONE event without EventType")
        return
    # drop the falsy attributes from the payload
    payload = {k: v for k, v in wsone_event.items() if v}
    # translate the Workspace ONE event type into a zentral event type
    known_event_types = {
        "Compliance Status Changed": "wsone_compliance_status_changed",
        "Compromised Status Changed": "wsone_compromised_status_changed",
        "Device Operating System Changed": "wsone_os_changed",
        "Device Organization Group Changed": "wsone_organization_group_changed",
        "MDM Enrollment Complete": "wsone_mdm_enrollment_complete",
    }
    event_type = known_event_types.get(wsone_event_type)
    if event_type is None:
        logger.warning("Unknown Workspace ONE event type: %s", wsone_event_type)

    def get_created_at(payload):
        # best effort: returns None (→ default timestamp) on any parse error
        try:
            return datetime.fromisoformat(payload.pop("EventTime")[:26])
        except Exception:
            pass

    if event_type:
        event_cls = event_cls_from_type(event_type)
        yield from event_cls.build_from_machine_request_payloads(
            serial_number,
            raw_event["request"]["user_agent"],
            raw_event["request"]["ip"],
            [payload],
            get_created_at=get_created_at,
            observer=raw_event["observer"]
        )
def update_machine(self, client, device_type, jamf_id):
    """Fetch a machine from jamf, commit a snapshot, yield inventory events.

    Generator: yields one event per inventory diff produced by the new
    machine snapshot commit. Errors are logged and swallowed (best effort).
    """
    logger.info("Update machine %s %s %s",
                client.get_source_d(), device_type, jamf_id)
    try:
        machine_d = client.get_machine_d(device_type, jamf_id)
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate instead of being swallowed.
        logger.exception("Could not get machine_d. %s %s %s",
                         client.get_source_d(), device_type, jamf_id)
    else:
        try:
            msc, ms = MachineSnapshotCommit.objects.commit_machine_snapshot_tree(
                machine_d)
        except Exception:
            # FIX: narrowed from a bare `except:` (same reason as above).
            logger.exception("Could not commit machine snapshot")
        else:
            if msc:
                # a new commit was created → yield the inventory diff events
                for idx, (event_type, created_at, payload) in enumerate(
                        inventory_events_from_machine_snapshot_commit(msc)):
                    event_cls = event_cls_from_type(event_type)
                    metadata = EventMetadata(
                        event_cls.event_type,
                        machine_serial_number=ms.serial_number,
                        index=idx,
                        created_at=created_at,
                        tags=event_cls.tags)
                    yield event_cls(metadata, payload)
def iter_inventory_events(msn, events):
    """Yield one zentral event per (event_type, created_at, data) triple.

    All events of the batch share a single UUID; their position in the
    iterable becomes the event index.
    """
    shared_uuid = uuid.uuid4()
    idx = 0
    for event_type, created_at, data in events:
        cls = event_cls_from_type(event_type)
        yield cls(
            EventMetadata(machine_serial_number=msn,
                          uuid=shared_uuid,
                          index=idx,
                          created_at=created_at),
            data,
        )
        idx += 1
def post_inventory_events(msn, events, uuid, index):
    """Post one event per (event_type, data) pair, under a shared UUID.

    Returns the next free index so the caller can chain further batches.
    """
    for event_type, data in events:
        cls = event_cls_from_type(event_type)
        md = EventMetadata(
            cls.event_type,
            machine_serial_number=msn,
            uuid=uuid,
            index=index,
            tags=cls.tags,
        )
        cls(md, data).post()
        index += 1
    return index
def post_inventory_events(msn, events):
    """Build and post one event per (event_type, created_at, data) triple."""
    index = 0
    for event_type, created_at, data in events:
        cls = event_cls_from_type(event_type)
        md = EventMetadata(
            cls.event_type,
            machine_serial_number=msn,
            index=index,
            created_at=created_at,
            tags=cls.tags,
        )
        cls(md, data).post()
        index += 1
def _deserialize_event(self, doc):
    """Rebuild an event instance from its stored document (mutates doc)."""
    doc.pop('stored_at')  # not part of the event metadata
    event_type = doc.pop('event_type')
    payload = doc.pop('payload')
    user_agent = doc.pop('user_agent')
    ip = doc.pop('ip')
    # only build a request when at least one attribute is present
    doc['request'] = EventRequest(user_agent, ip) if user_agent or ip else None
    event_cls = event_cls_from_type(event_type)
    return event_cls(EventMetadata(event_type, **doc), payload)
def post_nagios_event(nagios_instance, user_agent, ip, data):
    """Validate a nagios payload and post it as a machine request event."""
    event_type = data.pop("event_type", None)
    if not event_type:
        logger.warning("Missing event_type in nagios event payload")
        return
    if event_type not in ('nagios_host_event', 'nagios_service_event'):
        logger.warning("Wrong event_type %s in nagios event payload", event_type)
        return
    # tag the payload with its origin
    data["nagios_instance"] = {"id": nagios_instance.id,
                               "url": nagios_instance.url}
    cls = event_cls_from_type(event_type)
    cls.post_machine_request_payloads(None, user_agent, ip, [data])
def _deserialize_event(self, doc):
    """Rebuild an event from a stored document and apply the middlewares."""
    doc.pop("stored_at")  # not part of the event metadata
    event_type = doc.pop("event_type")
    payload = doc.pop("payload")
    user_agent = doc.pop("user_agent")
    ip = doc.pop("ip")
    # only build a request when at least one attribute is present
    doc["request"] = EventRequest(user_agent, ip) if user_agent or ip else None
    cls = event_cls_from_type(event_type)
    event = cls(EventMetadata(event_type, **doc), payload)
    event_handler.apply_middlewares(event)
    return event
def process_raw_event(self, raw_event):
    """Preprocess a raw jamf webhook event.

    Generator: depending on the event type, group members or the machine
    itself are updated first (yielding inventory events), then the jamf
    event is yielded, possibly enriched.
    """
    jamf_instance_d = raw_event["jamf_instance"]
    jamf_instance_key, client = self._get_client(jamf_instance_d)
    event_type = raw_event["event_type"]
    jamf_event = raw_event["jamf_event"]
    self._cleanup_jamf_event(jamf_event)
    if event_type == "jamf_smart_group_computer_membership_change" \
            or event_type == "jamf_smart_group_mobile_device_membership_change":
        if jamf_event.get("computer"):
            device_type = "computer"
        else:
            device_type = "mobile_device"
        jamf_group_id = jamf_event["jssid"]
        is_smart = jamf_event["smartGroup"]
        # find missing machines and machines still in the group
        # update them
        yield from self._update_group_machines(client, device_type,
                                               jamf_group_id, is_smart)
    elif event_type == "jamf_computer_policy_finished":
        policy_id = jamf_event["policyId"]
        policy_d = self._get_policy_general_info(jamf_instance_key, client,
                                                 policy_id)
        if policy_d:
            # enrich the event with the policy general information
            jamf_event["policy"] = policy_d
        else:
            logger.error("Could not get policy %s/%s general information",
                         jamf_instance_key, policy_id)
    else:
        # enrich jamf event ?
        pass
    serial_number = raw_event.get("serial_number")
    # machine needs update ?
    if event_type == "jamf_computer_inventory_completed" \
            or (serial_number and not self._is_known_machine(client, serial_number)):
        device_type = raw_event.get("device_type")
        jamf_machine_id = raw_event.get("jamf_id")
        yield from self._update_machine(client, device_type, jamf_machine_id)
    # yield jamf event
    event_cls = event_cls_from_type(event_type)
    yield from event_cls.build_from_machine_request_payloads(
        serial_number,
        raw_event["request"]["user_agent"],
        raw_event["request"]["ip"],
        [jamf_event],
        observer=raw_event.pop("observer", None))
def post_jamf_webhook_event(jamf_instance, user_agent, ip, data):
    """Route a jamf webhook: queue it for preprocessing or post it directly.

    Device and smart-group events are posted to the "jamf_events" raw
    event queue for enrichment; all other events are posted as-is.
    """
    jamf_event = data["webhook"]["webhookEvent"]
    event_type = 'jamf_{}'.format(JAMF_EVENTS[jamf_event][0])
    payload = data["event"]
    # device event ?
    device_type = None
    if jamf_event.startswith("Computer"):
        device_type = "computer"
    elif jamf_event.startswith("MobileDevice"):
        device_type = "mobile_device"
    observer_dict = jamf_instance.observer_dict()
    if device_type is not None \
            or event_type == "jamf_smart_group_computer_membership_change" \
            or event_type == "jamf_smart_group_mobile_device_membership_change":
        # event needs preprocessing
        raw_event = {
            "request": {
                "user_agent": user_agent,
                "ip": ip
            },
            "observer": observer_dict,
            "event_type": event_type,
            "jamf_instance": jamf_instance.serialize(),
            "jamf_event": payload
        }
        if device_type:
            # the id and serial number are nested under "computer" for
            # some computer webhooks, at the top level otherwise
            try:
                jamf_id = payload["computer"]["jssID"]
                serial_number = payload["computer"]["serialNumber"]
            except KeyError:
                jamf_id = payload["jssID"]
                serial_number = payload["serialNumber"]
            raw_event.update({
                "device_type": device_type,
                "jamf_id": jamf_id,
                "serial_number": serial_number,
            })
        queues.post_raw_event("jamf_events", raw_event)
    else:
        # event doesn't need preprocessing
        event_cls = event_cls_from_type(event_type)
        msn = payload.get("serialNumber", None)
        event_cls.post_machine_request_payloads(msn, user_agent, ip, [payload],
                                                observer=observer_dict)
def post_nagios_event(nagios_instance, user_agent, ip, data):
    """Check a nagios event payload, tag it with its origin, and post it."""
    event_type = data.pop("event_type", None)
    if not event_type:
        logger.warning("Missing event_type in nagios event payload")
        return
    if event_type not in ('nagios_host_event', 'nagios_service_event'):
        logger.warning("Wrong event_type %s in nagios event payload", event_type)
        return
    # add the originating nagios instance to the payload
    data["nagios_instance"] = {"id": nagios_instance.id,
                               "url": nagios_instance.url}
    cls = event_cls_from_type(event_type)
    cls.post_machine_request_payloads(None, user_agent, ip, [data])
def _deserialize_event(self, doc):
    """Rebuild an event from its stored document (mutates doc)."""
    doc.pop('stored_at')  # not part of the event metadata
    event_type = doc.pop('event_type')
    payload = doc.pop('payload')
    # collect the truthy request attributes only
    request_d = {}
    for attr in ('user_agent', 'ip', 'user'):
        value = doc.pop(attr)
        if value:
            request_d[attr] = value
    doc['request'] = EventRequest.deserialize(request_d) if request_d else None
    cls = event_cls_from_type(event_type)
    return cls(EventMetadata(event_type, **doc), payload)
def post_jamf_event(jamf_instance, user_agent, ip, data):
    """Route a jamf webhook: queue it for preprocessing or post it directly."""
    webhook_event = data["webhook"]["webhookEvent"]
    event_type = 'jamf_{}'.format(JAMF_EVENTS[webhook_event][0])
    payload = data["event"]
    # add origin to jamf event
    payload["jamf_instance"] = {"host": jamf_instance.host,
                                "path": jamf_instance.path,
                                "port": jamf_instance.port}
    # device event ?
    if webhook_event.startswith("Computer"):
        device_type = "computer"
    elif webhook_event.startswith("MobileDevice"):
        device_type = "mobile_device"
    else:
        device_type = None
    needs_preprocessing = (
        device_type is not None
        or event_type == "jamf_smart_group_computer_membership_change"
        or event_type == "jamf_smart_group_mobile_device_membership_change"
    )
    if needs_preprocessing:
        raw_event = {"request": {"user_agent": user_agent, "ip": ip},
                     "event_type": event_type,
                     "jamf_instance": jamf_instance.serialize(),
                     "jamf_event": payload}
        if device_type:
            raw_event["device_type"] = device_type
            raw_event["jamf_id"] = payload["jssID"]
            raw_event["serial_number"] = payload["serialNumber"]
        queues.post_raw_event("jamf_events", raw_event)
    else:
        # event doesn't need preprocessing, post it as-is
        cls = event_cls_from_type(event_type)
        msn = payload.get("serialNumber", None)
        cls.post_machine_request_payloads(msn, user_agent, ip, [payload])
def process_raw_event(self, raw_event):
    """Preprocess a raw jamf event.

    Generator: depending on the event type, group members or the machine
    itself are updated first (yielding inventory events), then the jamf
    event is yielded.
    """
    jamf_instance_d = raw_event["jamf_instance"]
    client = self.get_client(jamf_instance_d)
    event_type = raw_event["event_type"]
    jamf_event = raw_event["jamf_event"]
    if event_type == "jamf_smart_group_computer_membership_change" \
            or event_type == "jamf_smart_group_mobile_device_membership_change":
        if jamf_event.get("computer"):
            device_type = "computer"
        else:
            device_type = "mobile_device"
        jamf_group_id = jamf_event["jssid"]
        is_smart = jamf_event["smartGroup"]
        # find missing machines and machines still in the group
        # update them
        yield from self.update_group_machines(client, device_type,
                                              jamf_group_id, is_smart)
    elif event_type == "jamf_computer_push_capability_changed":
        # enrich jamf event ?
        pass
    serial_number = raw_event.get("serial_number")
    # machine needs update ?
    if event_type == "jamf_computer_inventory_completed" \
            or event_type == "jamf_computer_checkin" \
            or event_type == "jamf_mobile_device_checkin" \
            or (serial_number and not self.is_known_machine(client, serial_number)):
        device_type = raw_event.get("device_type")
        jamf_machine_id = raw_event.get("jamf_id")
        yield from self.update_machine(client, device_type, jamf_machine_id)
    # yield jamf event
    event_cls = event_cls_from_type(event_type)
    yield from event_cls.build_from_machine_request_payloads(
        serial_number,
        raw_event["request"]["user_agent"],
        raw_event["request"]["ip"],
        [jamf_event]
    )
def update_machine(self, machine_d):
    """Commit a machine snapshot tree and yield the inventory diff events.

    Generator: all yielded events share one UUID; commit errors are
    logged and swallowed (best effort).
    """
    logger.info("Update machine %s %s",
                machine_d["source"], machine_d["reference"])
    try:
        msc, ms, _ = MachineSnapshotCommit.objects.commit_machine_snapshot_tree(
            machine_d)
    except Exception:
        logger.exception("Could not commit machine snapshot")
        return
    if not msc:
        # nothing changed, no new commit, no events
        return
    shared_uuid = uuid.uuid4()
    for idx, (event_type, created_at, payload) in enumerate(
            inventory_events_from_machine_snapshot_commit(msc)):
        cls = event_cls_from_type(event_type)
        yield cls(
            EventMetadata(machine_serial_number=ms.serial_number,
                          uuid=shared_uuid,
                          index=idx,
                          created_at=created_at),
            payload,
        )
def _update_machine(self, client, device_type, jamf_id):
    """Fetch a machine from jamf, commit its snapshot, sync its tags.

    Generator: yields one event per inventory diff produced by the new
    machine snapshot commit. Fetch and commit errors are logged and
    swallowed (best effort).
    """
    logger.info("Update machine %s %s %s",
                client.source_repr, device_type, jamf_id)
    try:
        machine_d, tags = client.get_machine_d_and_tags(
            device_type, jamf_id)
    except Exception:
        logger.exception("Could not get machine_d and tags. %s %s %s",
                         client.source_repr, device_type, jamf_id)
    else:
        if not machine_d.get("serial_number"):
            # a serial number is required to anchor the snapshot and the tags
            logger.warning("Machine %s %s %s without serial number",
                           client.source_repr, device_type, jamf_id)
            return
        try:
            # commit atomically so a partial snapshot tree is never persisted
            with transaction.atomic():
                msc, ms = MachineSnapshotCommit.objects.commit_machine_snapshot_tree(
                    machine_d)
        except Exception:
            logger.exception("Could not commit machine snapshot")
        else:
            if msc:
                # a new commit was created → yield the inventory diff events
                for idx, (event_type, created_at, payload) in enumerate(
                        inventory_events_from_machine_snapshot_commit(
                            msc)):
                    event_cls = event_cls_from_type(event_type)
                    metadata = EventMetadata(
                        event_cls.event_type,
                        machine_serial_number=ms.serial_number,
                        index=idx,
                        created_at=created_at,
                        tags=event_cls.tags)
                    event = event_cls(metadata, payload)
                    yield event
        if tags:
            # sync the machine tags, grouped by taxonomy
            machine = MetaMachine(machine_d["serial_number"])
            for taxonomy_id, tag_names in tags.items():
                taxonomy = self._get_taxonomy(taxonomy_id)
                if taxonomy:
                    machine.update_taxonomy_tags(taxonomy, tag_names)
def post_jamf_event(jamf_instance, user_agent, ip, data):
    """Dispatch a jamf webhook event: preprocessing queue or direct post."""
    webhook_event = data["webhook"]["webhookEvent"]
    event_type = 'jamf_{}'.format(JAMF_EVENTS[webhook_event][0])
    payload = data["event"]
    # add origin to jamf event
    payload["jamf_instance"] = {
        "host": jamf_instance.host,
        "path": jamf_instance.path,
        "port": jamf_instance.port,
    }
    # device event ?
    if webhook_event.startswith("Computer"):
        device_type = "computer"
    elif webhook_event.startswith("MobileDevice"):
        device_type = "mobile_device"
    else:
        device_type = None
    smart_group_event = (
        event_type == "jamf_smart_group_computer_membership_change"
        or event_type == "jamf_smart_group_mobile_device_membership_change"
    )
    if device_type is not None or smart_group_event:
        # event needs preprocessing
        raw_event = {"request": {"user_agent": user_agent, "ip": ip},
                     "event_type": event_type,
                     "jamf_instance": jamf_instance.serialize(),
                     "jamf_event": payload}
        if device_type:
            raw_event["device_type"] = device_type
            raw_event["jamf_id"] = payload["jssID"]
            raw_event["serial_number"] = payload["serialNumber"]
        queues.post_raw_event("jamf_events", raw_event)
    else:
        # event doesn't need preprocessing, post it as-is
        cls = event_cls_from_type(event_type)
        msn = payload.get("serialNumber", None)
        cls.post_machine_request_payloads(msn, user_agent, ip, [payload])
def process_raw_event(self, raw_event):
    """Build the tar archive of a carve session and yield the carve event.

    Generator: concatenates the carve blocks into a temporary tar file,
    saves it on the session, then yields one osquery_file_carve event.
    """
    session_id = raw_event["session_id"]
    carve_session = CarveSession.objects.get(session_id=session_id)
    if carve_session.archive:
        # idempotency guard: the archive was already built
        logger.error("Archive already exists for session %s", session_id)
        return
    archive_size = 0
    tmp_fh, tmp_path = tempfile.mkstemp(suffix=self.__module__)
    logger.info("Start building archive %s %s", session_id, tmp_path)
    with os.fdopen(tmp_fh, "wb") as f:
        # blocks must be concatenated in block_id order to rebuild the tar
        for carve_block in carve_session.carveblock_set.all().order_by(
                "block_id"):
            for chunk in carve_block.file.chunks():
                f.write(chunk)
                archive_size += len(chunk)
    with open(tmp_path, "rb") as f:
        carve_session.archive.save("archive.tar", File(f))
    os.unlink(tmp_path)
    # yield osquery file carve event
    event_cls = event_cls_from_type("osquery_file_carve")
    for event in event_cls.build_from_machine_request_payloads(
            carve_session.machine_serial_number,
            None, None,
            [{"probe": {"id": carve_session.probe_source.id,
                        "name": carve_session.probe_source.name},
              "session_id": carve_session.session_id,
              "action": "archive",
              "archive": {"name": carve_session.get_archive_name(),
                          "size": archive_size,
                          "url": carve_session.get_archive_url()}}]):
        yield event
def commit(self):
    """Commit the collected compliance check statuses.

    Generator: updates the machine statuses from the accumulated
    cc_statuses and yields one status-updated event per compliance
    check whose status actually changed.
    """
    if not self.cc_statuses:
        # nothing collected, nothing to commit
        return
    compliance_check_statuses = []
    checks = {}
    for query in (Query.objects.select_related(
            "compliance_check").prefetch_related("packquery__pack").filter(
                pk__in=self.cc_statuses.keys(),
                compliance_check__isnull=False)):
        query_version, status, status_time, distributed_query_pk = self.cc_statuses[
            query.pk]
        if query.version != query_version:
            # outdated status
            continue
        compliance_check_statuses.append(
            (query.compliance_check, status, status_time))
        # keep the context needed to build the event later
        checks[query.compliance_check.pk] = (query, status_time,
                                             distributed_query_pk)
    status_updates = update_machine_statuses(self.serial_number,
                                             compliance_check_statuses)
    event_cls = event_cls_from_type(
        "osquery_check_status_updated")  # import cycle with osquery.events
    for compliance_check_pk, status_value, previous_status_value in status_updates:
        if status_value == previous_status_value:
            # status not updated, no event
            continue
        query, status_time, distributed_query_pk = checks[
            compliance_check_pk]
        yield event_cls.build_from_query_serial_number_and_statuses(
            query,
            distributed_query_pk,
            self.serial_number,
            Status(status_value),
            status_time,
            Status(previous_status_value) if previous_status_value is not None else None,
        )
def post_jss_event(user_agent, ip, data):
    """Translate a JSS webhook into a zentral event and post it."""
    webhook_event = data["webhook"]["webhookEvent"]
    cls = event_cls_from_type("jss_{}".format(JSS_EVENTS[webhook_event]))
    payload = data["event"]
    msn = payload.get("serialNumber", None)
    cls.post_machine_request_payloads(msn, user_agent, ip, [payload])
def post_jss_event(user_agent, ip, data):
    """Translate a JSS webhook into a zentral event and post it."""
    webhook_event = data["webhook"]["webhookEvent"]
    cls = event_cls_from_type('jss_{}'.format(JSS_EVENTS[webhook_event][0]))
    payload = data["event"]
    msn = payload.get("serialNumber", None)
    cls.post_machine_request_payloads(msn, user_agent, ip, [payload])