def do_post(self, data):
    """Process a Santa preflight request.

    Posts a preflight event, commits a machine snapshot tree built from
    the request payload, and returns the sync configuration for the
    Santa client.
    """
    machine_serial_number = data['serial_num']
    # Explicit validation instead of `assert`: asserts are stripped when
    # Python runs with -O and the check would then silently disappear.
    if machine_serial_number != self.machine_serial_number:
        raise ValueError("Machine serial number mismatch")
    post_santa_preflight(machine_serial_number,
                         self.user_agent,
                         self.ip,
                         data)
    # major.minor.patch parsed from the version string (zip truncates if
    # fewer than 3 components are present)
    os_version = dict(zip(('major', 'minor', 'patch'),
                          (int(s) for s in data['os_version'].split('.'))))
    os_version.update({'name': 'Mac OS X',
                       'build': data['os_build']})
    tree = {'source': {'module': 'zentral.contrib.santa',
                       'name': 'Santa'},
            'reference': machine_serial_number,
            'serial_number': machine_serial_number,
            'os_version': os_version,
            'system_info': {'computer_name': data['hostname']},
            'public_ip_address': self.ip,
            }
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
    return {'BatchSize': 20,  # TODO: ???
            'UploadLogsUrl': 'https://{host}{path}'.format(
                host=self.request.get_host(),
                path=reverse('santa:logupload', args=(self.machine_id,)))}
def do_post(self, data):
    """Handle a Santa preflight: commit the inventory tree, return the sync config."""
    machine_serial_number = data['serial_num']
    assert (machine_serial_number == self.machine_serial_number)
    post_santa_preflight(machine_serial_number, self.user_agent, self.ip, data)
    # parse major.minor.patch; zip truncates if fewer components are present
    os_version = {key: int(val)
                  for key, val in zip(('major', 'minor', 'patch'),
                                      data['os_version'].split('.'))}
    os_version['name'] = 'Mac OS X'
    os_version['build'] = data['os_build']
    tree = {
        'source': {'module': 'zentral.contrib.santa', 'name': 'Santa'},
        'reference': machine_serial_number,
        'serial_number': machine_serial_number,
        'os_version': os_version,
        'system_info': {'computer_name': data['hostname']},
        'public_ip_address': self.ip,
    }
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
    log_upload_path = reverse('santa:logupload', args=(self.machine_id,))
    return {
        'BatchSize': 20,  # TODO: ???
        'UploadLogsUrl': 'https://{host}{path}'.format(host=self.request.get_host(),
                                                       path=log_upload_path),
    }
def commit_machine_snapshot(self, data):
    """Build and commit a Santa machine snapshot tree from the request data."""
    # OS version: major.minor.patch from the version string plus the build,
    # refined with the macOS marketing version when the build is known.
    build = data["os_build"]
    os_version = {key: int(val)
                  for key, val in zip(('major', 'minor', 'patch'),
                                      data['os_version'].split('.'))}
    os_version['name'] = 'macOS'
    os_version['build'] = build
    try:
        os_version.update(macos_version_from_build(build))
    except ValueError:
        # unknown build: keep the version parsed from the request
        pass
    # tree
    tree = {
        'source': {'module': 'zentral.contrib.santa', 'name': 'Santa'},
        'reference': self.hardware_uuid,
        'serial_number': self.machine_serial_number,
        'os_version': os_version,
        'system_info': {'computer_name': data['hostname']},
        'public_ip_address': self.ip,
    }
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
def do_post(self, data):
    """Handle a Santa preflight request and return the client sync config."""
    post_preflight_event(self.machine_serial_number, self.user_agent, self.ip, data)
    os_version = {key: int(val)
                  for key, val in zip(('major', 'minor', 'patch'),
                                      data['os_version'].split('.'))}
    os_version['name'] = 'Mac OS X'
    os_version['build'] = data['os_build']
    tree = {
        'source': {'module': 'zentral.contrib.santa', 'name': 'Santa'},
        'serial_number': self.machine_serial_number,
        'os_version': os_version,
        'system_info': {'computer_name': data['hostname']},
        'public_ip_address': self.ip,
    }
    if self.enrolled_machine:
        # new way: the enrolled machine ID is the snapshot reference
        tree["reference"] = self.enrolled_machine.machine_id
    else:
        # old way  # TODO: remove it
        tree["reference"] = self.machine_serial_number
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
    upload_logs_url = 'https://{host}{path}'.format(
        host=self.request.get_host(),
        path=reverse('santa:logupload', args=(self.machine_id,)))
    config_dict = {'UploadLogsUrl': upload_logs_url}
    if self.enrolled_machine:
        config_dict.update(self.enrolled_machine.enrollment.configuration.get_sync_server_config())
    else:
        config_dict['BatchSize'] = Configuration.DEFAULT_BATCH_SIZE
    return config_dict
def commit_machine_snapshot(self, max_age=3600):
    """Commit a Jamf Protect machine snapshot, unless a recent one already exists.

    max_age: maximum age, in seconds, of an existing snapshot from this
    source before a new one is committed.
    """
    module = 'zentral.contrib.jamf_protect'
    # skip the commit if a recent enough snapshot from this source exists
    if MetaMachine(self.serial_number).has_recent_source_snapshot(module, max_age):
        logger.debug("Skip Jamf Protect machine snapshot commit for machine %s.",
                     self.serial_number)
        return
    tree = {
        'source': {'module': module, 'name': 'Jamf Protect'},
        'reference': self.serial_number,
        'serial_number': self.serial_number,
        'public_ip_address': self.ip,
    }
    hostname = self.event.get("host", {}).get("hostname")
    if hostname:
        tree['system_info'] = {'computer_name': hostname}
    business_unit = self.enrolled_machine.enrollment.secret.get_api_enrollment_business_unit()
    if business_unit:
        tree['business_unit'] = business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
def commit_inventory_query_result(self, snapshot):
    """Merge the osquery inventory query tables into the machine snapshot
    tree and commit it.

    snapshot: iterable of row dicts, each carrying a 'table_name' key.
    """
    tree = self.ms.serialize()
    tree["serial_number"] = self.machine_serial_number
    tree["public_ip_address"] = self.ip
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()

    def clean_dict(d):
        # drop None and empty string values in place
        for k, v in list(d.items()):
            if v is None or v == "":
                del d[k]
        return d

    deb_packages = []
    network_interfaces = []
    osx_app_instances = []
    for t in snapshot:
        table_name = t.pop('table_name')
        if table_name == 'os_version':
            os_version = clean_dict(t)
            if os_version:
                tree['os_version'] = os_version
        elif table_name == 'system_info':
            system_info = clean_dict(t)
            if system_info:
                tree['system_info'] = system_info
        # FIX: this branch previously restarted a new `if` chain instead of
        # continuing with `elif`. Behavior was the same (table names are
        # mutually exclusive) but the single chain avoids a redundant
        # comparison and matches the sibling implementations.
        elif table_name == 'deb_packages':
            deb_package = clean_dict(t)
            if deb_package:
                if deb_package not in deb_packages:
                    deb_packages.append(deb_package)
                else:
                    logger.warning("Duplicated deb package")
        elif table_name == 'network_interface':
            network_interface = clean_dict(t)
            if network_interface:
                if network_interface not in network_interfaces:
                    network_interfaces.append(network_interface)
                else:
                    logger.warning("Duplicated network interface")
        elif table_name == 'apps':
            bundle_path = t.pop('bundle_path')
            osx_app = clean_dict(t)
            if osx_app and bundle_path:
                osx_app_instance = {'app': osx_app,
                                    'bundle_path': bundle_path}
                if osx_app_instance not in osx_app_instances:
                    osx_app_instances.append(osx_app_instance)
                else:
                    logger.warning("Duplicated osx app instance")
    if deb_packages:
        tree["deb_packages"] = deb_packages
    if network_interfaces:
        tree["network_interfaces"] = network_interfaces
    if osx_app_instances:
        tree["osx_app_instances"] = osx_app_instances
    commit_machine_snapshot_and_trigger_events(tree)
def commit_inventory_query_result(self, snapshot):
    """Serialize the current machine snapshot, merge the inventory query
    snapshot into it, and commit the resulting tree."""
    tree = self.get_machine_snapshot().serialize()
    tree["serial_number"] = self.machine_serial_number
    tree["public_ip_address"] = self.ip
    business_unit = self.business_unit
    if business_unit:
        tree['business_unit'] = business_unit.serialize()
    update_tree_with_inventory_query_snapshot(tree, snapshot)
    commit_machine_snapshot_and_trigger_events(tree)
def do_acknowledged(self):
    """Process an acknowledged MDM command response and post a success event."""
    # TODO: QUICK AND DIRTY first command
    query_responses = self.payload.get("QueryResponses")
    if query_responses:
        tree = tree_from_payload(self.udid,
                                 self.serial_number,
                                 self.meta_business_unit,
                                 query_responses)
        commit_machine_snapshot_and_trigger_events(tree)
    self.post_event("success", command_uuid=self.payload["CommandUUID"])
    return HttpResponse()
def do_post(self):
    """Enroll (or re-enroll) an osquery machine and return its node key.

    Updates or creates the enrolled machine, applies the enrollment
    secret tags, removes stale enrolled machines for the same serial
    number, commits an initial machine snapshot if needed and posts an
    enrollment event.
    """
    enrollment = self.es_request.enrollment_secret.osquery_enrollment
    # update or create enrolled machine
    enrolled_machine_defaults = {"node_key": get_random_string(32)}
    try:
        enrolled_machine_defaults["platform_mask"] = int(self.data["platform_type"])
    except (KeyError, ValueError, TypeError):
        logger.error("Could not get platform_mask from enrollment data")
    try:
        enrolled_machine_defaults["osquery_version"] = self.data["host_details"]["osquery_info"]["version"]
    except KeyError:
        logger.error("Could not get osquery version from enrollment data")
    enrolled_machine, _ = EnrolledMachine.objects.update_or_create(
        enrollment=enrollment,
        serial_number=self.serial_number,
        defaults=enrolled_machine_defaults
    )
    # apply enrollment secret tags
    for tag in enrollment.secret.tags.all():
        MachineTag.objects.get_or_create(serial_number=self.serial_number, tag=tag)
    # delete other enrolled machines
    other_enrolled_machines = (EnrolledMachine.objects.exclude(pk=enrolled_machine.pk)
                                                      .filter(serial_number=self.serial_number))
    # PERF: .exists() instead of .count() — only the presence matters here
    if other_enrolled_machines.exists():
        enrollment_action = 're-enrollment'
        other_enrolled_machines.delete()
    else:
        enrollment_action = 'enrollment'
    # create machine snapshot if necessary
    if not MachineSnapshot.objects.filter(source__module="zentral.contrib.osquery",
                                          source__name="osquery",
                                          serial_number=self.serial_number,
                                          reference=enrolled_machine.node_key).exists():
        tree = {"source": {"module": "zentral.contrib.osquery", "name": "osquery"},
                "serial_number": self.serial_number,
                "reference": enrolled_machine.node_key,
                "public_ip_address": self.ip}
        business_unit = enrollment.secret.get_api_enrollment_business_unit()
        if business_unit:
            tree["business_unit"] = business_unit.serialize()
        update_tree_with_enrollment_host_details(tree, self.data.get("host_details"))
        commit_machine_snapshot_and_trigger_events(tree)
    post_enrollment_event(self.serial_number, self.user_agent, self.ip,
                          {'action': enrollment_action})
    return {'node_key': enrolled_machine.node_key}
def enroll(serial_number, business_unit, host_identifier, ip):
    """Enroll or re-enroll an osquery machine.

    Returns a (machine snapshot, action) tuple where action is
    'enrollment' or 're-enrollment'. The machine snapshot is None if the
    tree could not be committed.
    """
    source_module = "zentral.contrib.osquery"
    source_name = "osquery"
    msc_qs = (MachineSnapshotCommit.objects.filter(source__name=source_name,
                                                   source__module=source_module,
                                                   serial_number=serial_number)
                                           .order_by("-version"))
    try:
        msc = msc_qs[0]
    except IndexError:
        # first enrollment: fresh tree with a random reference
        action = 'enrollment'
        tree = {'source': {'module': source_module, 'name': source_name},
                'reference': get_random_string(64),
                'serial_number': serial_number}
        if host_identifier:
            tree["system_info"] = {"computer_name": host_identifier}
    else:
        # re-enrollment: start from the latest committed snapshot
        action = 're-enrollment'
        tree = msc.machine_snapshot.serialize()
    if business_unit:
        tree['business_unit'] = business_unit.serialize()
    if ip:
        tree["public_ip_address"] = ip
    ms = commit_machine_snapshot_and_trigger_events(tree)
    if not ms:
        logger.error("Enrollment error. Could not commit tree")
    return ms, action
def sync(self):
    """Sync machines from the inventory client.

    Commits one machine snapshot tree per reported machine, then removes
    the current snapshots of machines no longer reported by the source.
    """
    seen_machines = []
    inventory_source = None
    for machine_d in self.get_machines():
        # each tree gets its own copy: the commit mutates the source dict
        source = copy.deepcopy(self.source)
        try:
            serial_number = machine_d['serial_number']
        except KeyError:
            logger.warning('Machine w/o serial number. Client "%s". Reference "%s"',
                           self.name, machine_d.get('reference', 'Unknown'))
            continue
        if not serial_number:
            logger.warning('Machine serial number blank. Client "%s". Reference "%s"',
                           self.name, machine_d.get('reference', 'Unknown'))
            continue
        seen_machines.append(serial_number)
        machine_d['source'] = source
        for group_d in machine_d.get('groups', []):
            group_d['source'] = source
        business_unit_d = machine_d.get('business_unit')
        if business_unit_d:
            business_unit_d['source'] = source
        # save all
        ms = commit_machine_snapshot_and_trigger_events(machine_d)
        if inventory_source is None and ms:
            inventory_source = ms.source
    if seen_machines and inventory_source:
        obsolete_qs = (CurrentMachineSnapshot.objects
                       .filter(source=inventory_source)
                       .exclude(serial_number__in=seen_machines))
        obsolete_qs.delete()
def enroll(enrollment, serial_number, business_unit, host_identifier, ip):
    """Enroll or re-enroll an osquery machine and commit its machine snapshot.

    Returns a (machine snapshot, action) tuple. The machine snapshot is
    None if the tree could not be committed.
    """
    node_key = None
    if enrollment:
        # new way: enrolled machine with its own node key
        enrolled_machine = get_or_create_enrolled_machine(enrollment, serial_number)
        node_key = enrolled_machine.node_key
        # apply the enrollment secret tags
        for tag in enrollment.secret.tags.all():
            MachineTag.objects.get_or_create(serial_number=serial_number, tag=tag)
    # machine snapshot commit
    action, tree = get_or_create_machine_snapshot(serial_number, host_identifier, node_key)
    # update and commit the machine snapshot tree
    if business_unit:
        tree['business_unit'] = business_unit.serialize()
    if ip:
        tree["public_ip_address"] = ip
    ms = commit_machine_snapshot_and_trigger_events(tree)
    if not ms:
        logger.error("Could not commit machine snapshot tree during the osquery enrollment")
    return ms, action
def sync(self):
    """Sync machines reported by the inventory client and prune stale snapshots."""
    seen_serial_numbers = []
    committed_source = None
    for machine_dict in self.get_machines():
        # deepcopy: the snapshot commit mutates the source dict
        source_dict = copy.deepcopy(self.source)
        try:
            serial_number = machine_dict['serial_number']
        except KeyError:
            logger.warning('Machine w/o serial number. Client "%s". Reference "%s"',
                           self.name, machine_dict.get('reference', 'Unknown'))
            continue
        if not serial_number:
            logger.warning('Machine serial number blank. Client "%s". Reference "%s"',
                           self.name, machine_dict.get('reference', 'Unknown'))
            continue
        seen_serial_numbers.append(serial_number)
        machine_dict['source'] = source_dict
        for group_dict in machine_dict.get('groups', []):
            group_dict['source'] = source_dict
        business_unit_dict = machine_dict.get('business_unit', None)
        if business_unit_dict:
            business_unit_dict['source'] = source_dict
        machine_snapshot = commit_machine_snapshot_and_trigger_events(machine_dict)
        if committed_source is None and machine_snapshot:
            committed_source = machine_snapshot.source
    if seen_serial_numbers and committed_source:
        (CurrentMachineSnapshot.objects
         .filter(source=committed_source)
         .exclude(serial_number__in=seen_serial_numbers)
         .delete())
def commit_device_information_command_response(meta_business_unit, enrolled_device, payload):
    """Commit a machine snapshot from a DeviceInformation command response.

    Returns the committed machine snapshot, or None when the payload
    carries no QueryResponses.
    """
    query_responses = payload.get("QueryResponses")
    if not query_responses:
        return None
    tree = tree_from_payload(enrolled_device.udid,
                             enrolled_device.serial_number,
                             meta_business_unit,
                             query_responses)
    return commit_machine_snapshot_and_trigger_events(tree)
def do_post(self, data):
    """Process a Munki postflight request.

    Commits a machine snapshot (only when santa file info is included),
    posts the request and report events, and updates the MunkiState row
    for the machine. Returns an empty response dict.
    """
    ms_tree = data['machine_snapshot']
    ms_tree['source'] = {'module': 'zentral.contrib.munki', 'name': 'Munki'}
    machine = ms_tree.pop('machine', None)
    if machine:
        # TODO deprecated
        ms_tree['serial_number'] = machine['serial_number']
    ms_tree['reference'] = ms_tree['serial_number']
    ms_tree['public_ip_address'] = self.ip
    if data.get('include_santa_fileinfo', False):
        # the snapshot is only committed when the santa file info is included
        clean_certs_datetime(ms_tree)
        if self.business_unit:
            ms_tree['business_unit'] = self.business_unit.serialize()
        ms = commit_machine_snapshot_and_trigger_events(ms_tree)
        if not ms:
            raise RuntimeError("Could not commit machine snapshot")
        msn = ms.serial_number
    else:
        msn = ms_tree['reference']
    # (start_time, end_time, report) tuples — sortable chronologically
    reports = [(parser.parse(r.pop('start_time')),
                parser.parse(r.pop('end_time')),
                r) for r in data.pop('reports')]
    # Events
    event_data = {"request_type": "postflight",
                  "include_santa_fileinfo": data.get('include_santa_fileinfo', False)}
    if self.enrollment:
        event_data["enrollment"] = {"pk": self.enrollment.pk}
    post_munki_request_event(msn, self.user_agent, self.ip, **event_data)
    post_munki_events(msn, self.user_agent, self.ip, (r for _, _, r in reports))
    # MunkiState
    update_dict = {'user_agent': self.user_agent, 'ip': self.ip}
    # NOTE(review): key 'santa_fileinfo_included' differs from the
    # 'include_santa_fileinfo' key used above — confirm which one the
    # client actually sends.
    if data.get('santa_fileinfo_included', False):
        update_dict['binaryinfo_last_seen'] = timezone.now()
    if reports:
        reports.sort()
        # keep the info from the most recent report
        start_time, end_time, report = reports[-1]
        update_dict.update({'munki_version': report.get('munki_version', None),
                            'sha1sum': report['sha1sum'],
                            'run_type': report['run_type'],
                            'start_time': start_time,
                            'end_time': end_time})
    with transaction.atomic():
        MunkiState.objects.update_or_create(machine_serial_number=msn,
                                            defaults=update_dict)
    return {}
def _commit_machine_snapshot(self):
    """Build and commit the Santa machine snapshot tree for this request."""
    # OS version
    build = self.request_data["os_build"]
    os_version = {key: int(val)
                  for key, val in zip(('major', 'minor', 'patch'),
                                      self.request_data['os_version'].split('.'))}
    os_version['name'] = 'macOS'
    os_version['build'] = build
    try:
        os_version.update(macos_version_from_build(build))
    except ValueError:
        # unknown build: keep the version parsed from the request
        pass
    # tree
    tree = {
        'source': {'module': 'zentral.contrib.santa', 'name': 'Santa'},
        'reference': self.hardware_uuid,
        'serial_number': self.enrolled_machine.serial_number,
        'os_version': os_version,
        'system_info': {'computer_name': self.request_data['hostname']},
        'public_ip_address': self.ip,
    }
    # tree primary user
    primary_user = self._get_primary_user()
    if primary_user:
        tree['principal_user'] = {
            'source': {'type': PrincipalUserSource.SANTA_MACHINE_OWNER},
            'unique_id': primary_user,
            'principal_name': primary_user,
        }
    # tree business unit
    business_unit = self.enrolled_machine.enrollment.secret.get_api_enrollment_business_unit()
    if business_unit:
        tree['business_unit'] = business_unit.serialize()
    commit_machine_snapshot_and_trigger_events(tree)
def do_node_post(self):
    """Process an osquery node log POST (result or status logs).

    Result records matching the inventory query feed the machine
    snapshot; all other result records are posted as osquery results.
    Status logs are posted as status log events. Returns an empty
    response dict.
    """
    records = self.data.pop("data", [])
    if not records:
        logger.warning("No records found")
        return {}
    # process the records in chronological order
    records.sort(key=lambda r: r.get("unixTime", 0))
    self.process_decorations(records)
    log_type = self.data.get("log_type")
    if log_type == "result":
        results = []
        last_inventory_snapshot = None
        for record in records:
            if record.get("name") == INVENTORY_QUERY_NAME:
                # records are sorted: keeps the most recent inventory snapshot
                last_inventory_snapshot = record.get("snapshot")
            else:
                results.append(record)
        if last_inventory_snapshot:
            tree = {"source": {"module": "zentral.contrib.osquery", "name": "osquery"},
                    "serial_number": self.machine.serial_number,
                    "reference": self.enrolled_machine.node_key,
                    "public_ip_address": self.ip}
            business_unit = self.enrollment.secret.get_api_enrollment_business_unit()
            if business_unit:
                tree["business_unit"] = business_unit.serialize()
            update_tree_with_inventory_query_snapshot(tree, last_inventory_snapshot)
            commit_machine_snapshot_and_trigger_events(tree)
        post_results(self.machine.serial_number, self.user_agent, self.ip, results)
    elif log_type == "status":
        # TODO: configuration option to filter some of those (severity) or maybe simply ignore them
        post_status_logs(self.machine.serial_number, self.user_agent, self.ip, records)
    else:
        logger.error("Unknown log type %s", log_type)
    return {}
def do_post(self, data):
    """Process a Munki postflight request.

    Commits a machine snapshot (only when santa file info is included),
    posts the request and report events, and updates the MunkiState row
    for the machine. Returns an empty response dict.
    """
    ms_tree = data['machine_snapshot']
    ms_tree['source'] = {'module': 'zentral.contrib.munki', 'name': 'Munki'}
    machine = ms_tree.pop('machine', None)
    if machine:
        # TODO deprecated
        ms_tree['serial_number'] = machine['serial_number']
    ms_tree['reference'] = ms_tree['serial_number']
    ms_tree['public_ip_address'] = self.ip
    if data.get('include_santa_fileinfo', False):
        # the snapshot is only committed when the santa file info is included
        clean_certs_datetime(ms_tree)
        if self.business_unit:
            ms_tree['business_unit'] = self.business_unit.serialize()
        ms = commit_machine_snapshot_and_trigger_events(ms_tree)
        if not ms:
            raise RuntimeError("Could not commit machine snapshot")
        msn = ms.serial_number
    else:
        msn = ms_tree['reference']
    # (start_time, end_time, report) tuples — sortable chronologically
    reports = [(parser.parse(r.pop('start_time')),
                parser.parse(r.pop('end_time')),
                r) for r in data.pop('reports')]
    # Events
    post_munki_request_event(msn, self.user_agent, self.ip,
                             request_type="postflight",
                             include_santa_fileinfo=data.get('include_santa_fileinfo', False))
    post_munki_events(msn, self.user_agent, self.ip, (r for _, _, r in reports))
    # MunkiState
    update_dict = {'user_agent': self.user_agent, 'ip': self.ip}
    # NOTE(review): key 'santa_fileinfo_included' differs from the
    # 'include_santa_fileinfo' key used above — confirm which one the
    # client actually sends.
    if data.get('santa_fileinfo_included', False):
        update_dict['binaryinfo_last_seen'] = timezone.now()
    if reports:
        reports.sort()
        # keep the info from the most recent report
        start_time, end_time, report = reports[-1]
        update_dict.update({'munki_version': report.get('munki_version', None),
                            'sha1sum': report['sha1sum'],
                            'run_type': report['run_type'],
                            'start_time': start_time,
                            'end_time': end_time})
    with transaction.atomic():
        MunkiState.objects.update_or_create(machine_serial_number=msn,
                                            defaults=update_dict)
    return {}
def update_device_command(meta_business_unit, enrolled_device, command_uuid, payload_status, payload):
    """Update a device command from an MDM response payload.

    Records the payload status on the command, then applies the
    request-type specific side effects (DeviceConfigured flag,
    DeviceInformation machine snapshot commit). Returns the device
    command, the committed machine snapshot for DeviceInformation
    responses, or None when the command cannot be found.
    """
    # less specific than update_device_artifact_command. MUST RUN AFTERWARD.
    # find command
    try:
        device_command = DeviceCommand.objects.get(enrolled_device=enrolled_device,
                                                   uuid=command_uuid)
    except DeviceCommand.DoesNotExist:
        logger.exception("Could not find device command %s", command_uuid)
        return
    # update device command
    device_command.status_code = payload_status
    device_command.result_time = timezone.now()
    device_command.save()
    request_type = device_command.request_type
    if request_type == "DeviceConfigured":
        if payload_status == DeviceCommand.STATUS_CODE_ACKNOWLEDGED:
            if enrolled_device.awaiting_configuration:
                # leave the await-configuration state
                enrolled_device.awaiting_configuration = False
                enrolled_device.save()
            else:
                logger.error("Enrolled device %s is not awaiting configuration!",
                             enrolled_device.udid)
        else:
            logger.error("DeviceConfigured command unexpected status %s for device %s",
                         payload_status, enrolled_device.udid)
    elif request_type == "DeviceInformation":
        query_responses = payload.get("QueryResponses")
        if query_responses:
            # NB: returns the machine snapshot, not the device command
            return commit_machine_snapshot_and_trigger_events(
                tree_from_payload(enrolled_device.udid,
                                  enrolled_device.serial_number,
                                  meta_business_unit,
                                  query_responses))
        else:
            logger.error("Empty or absent QueryResponses in a DeviceInformation response.")
    return device_command
def do_sync_inventory(instance, client, serialized_event_request=None):
    """Run a full inventory sync for a Workspace ONE instance.

    Commits one machine snapshot tree per machine reported by the
    client, prunes the current snapshots of machines no longer reported,
    and posts sync started/finished events. Returns a result dict with
    status, counters, duration and the error message if any.
    """
    post_sync_started_event(instance, serialized_event_request)
    seen_machines = []
    inventory_source = None
    machines_synced = 0
    machines_removed = 0
    error = None
    start_t = time.time()
    try:
        for ms_tree in client.iter_machine_snapshot_trees():
            seen_machines.append(ms_tree["serial_number"])
            ms = None
            # one transaction per machine snapshot commit
            with transaction.atomic():
                ms = commit_machine_snapshot_and_trigger_events(ms_tree)
            if inventory_source is None and ms:
                inventory_source = ms.source
            machines_synced += 1
        if seen_machines and inventory_source:
            with transaction.atomic():
                machines_removed, _ = (
                    CurrentMachineSnapshot.objects.filter(source=inventory_source)
                    .exclude(serial_number__in=seen_machines)
                    .delete()
                )
    except Exception as e:
        # deliberate broad catch at the sync boundary: the error is
        # logged and reported in the sync finished event
        logger.exception("Workspace ONE instance sync error")
        error = str(e)
    result = {
        "status": "SUCCESS" if error is None else "FAILURE",
        "machines_synced": machines_synced,
        "machines_removed": machines_removed,
        "duration": int(time.time() - start_t),
    }
    if error:
        result["error"] = error
    post_sync_finished_event(instance, serialized_event_request, result)
    return result
def commit_inventory_query_result(self, snapshot):
    """Merge the inventory query tables into the serialized machine
    snapshot and commit the resulting tree.

    snapshot: iterable of row dicts, each carrying a 'table_name' key.
    """
    tree = self.get_machine_snapshot().serialize()
    tree["serial_number"] = self.machine_serial_number
    tree["public_ip_address"] = self.ip
    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()

    def clean_dict(d):
        # strip string values, drop None and empty string values, in place
        for k, v in list(d.items()):
            if isinstance(v, str):
                v = v.strip()
            if v is None or v == "":
                del d[k]
            elif v != d[k]:
                d[k] = v
        return d

    deb_packages = []
    network_interfaces = []
    osx_app_instances = []
    azure_ad_info = {}
    for t in snapshot:
        table_name = t.pop('table_name')
        if table_name == 'os_version':
            os_version = clean_dict(t)
            if os_version:
                tree['os_version'] = os_version
        elif table_name == 'system_info':
            system_info = clean_dict(t)
            if system_info:
                tree['system_info'] = system_info
        elif table_name == 'uptime':
            try:
                system_uptime = int(t['total_seconds'])
            except (KeyError, TypeError, ValueError):
                # missing or unparseable uptime: skip it
                pass
            else:
                if system_uptime > 0:
                    tree['system_uptime'] = system_uptime
        elif table_name == 'network_interface':
            network_interface = clean_dict(t)
            if network_interface:
                if network_interface not in network_interfaces:
                    network_interfaces.append(network_interface)
                else:
                    logger.warning("Duplicated network interface")
        elif table_name == 'deb_packages':
            deb_package = clean_dict(t)
            if deb_package:
                if deb_package not in deb_packages:
                    deb_packages.append(deb_package)
                else:
                    logger.warning("Duplicated deb package")
        elif table_name == 'apps':
            bundle_path = t.pop('bundle_path')
            osx_app = clean_dict(t)
            if osx_app and bundle_path:
                osx_app_instance = {'app': osx_app,
                                    'bundle_path': bundle_path}
                if osx_app_instance not in osx_app_instances:
                    osx_app_instances.append(osx_app_instance)
                else:
                    logger.warning("Duplicated osx app instance")
        elif table_name == 'azure_ad_certificate':
            common_name = t.get("common_name")
            if common_name:
                azure_ad_info["device_unique_id"] = common_name
        elif table_name == 'azure_ad_user_info':
            # TODO: verify users count = 1!
            azure_ad_info["local_user_name"] = t.get('username')
            key = t.get("key")
            value = t.get("value")
            if key == "aadUniqueId":
                azure_ad_info["user_unique_id"] = value
            elif key == "aadUserId":
                azure_ad_info["user_id"] = value
    if deb_packages:
        tree["deb_packages"] = deb_packages
    if network_interfaces:
        tree["network_interfaces"] = network_interfaces
    if osx_app_instances:
        tree["osx_app_instances"] = osx_app_instances
    if azure_ad_info:
        tree["azure_ad_info"] = azure_ad_info
    commit_machine_snapshot_and_trigger_events(tree)
def commit_tree(self):
    """Commit the machine snapshot tree built from the command payload."""
    tree = tree_from_payload(self.udid,
                             self.serial_number,
                             self.meta_business_unit,
                             self.payload)
    commit_machine_snapshot_and_trigger_events(tree)
def commit_tree_from_payload(udid, serial_number, meta_business_unit, payload):
    """Build a machine snapshot tree from an MDM payload, commit it and
    return it.

    The tree carries the device IDs (IMEI/MEID), the first API
    enrollment business unit of the meta business unit, the system info
    and the OS version with a platform name derived from the hardware
    model.
    """
    tree = {"source": {"module": "zentral.contrib.mdm", "name": "MDM"},
            "reference": udid,
            "serial_number": serial_number}
    # Mobile device IDs
    for attr in ("IMEI", "MEID"):
        val = payload.get(attr)
        if val:
            tree[attr.lower()] = val
    # BU
    try:
        tree["business_unit"] = meta_business_unit.api_enrollment_business_units()[0].serialize()
    except IndexError:
        # no API enrollment business unit: no BU in the tree
        pass
    # System Info
    system_info_d = {}
    for si_attr, attr in (("computer_name", "DeviceName"),
                          ("hardware_model", "ProductName"),  # iPad5,2, seen during User Enrollment
                          ("hardware_model", "Model"),  # MacBookPro11,1
                          ("hardware_serial", "SerialNumber")):
        if system_info_d.get(si_attr):
            # first matching payload attribute wins
            continue
        val = payload.get(attr)
        if val:
            system_info_d[si_attr] = val
    if system_info_d:
        tree["system_info"] = system_info_d
    # OS Version
    # FIX: the OS version dict used to be computed twice; the first
    # computation (without the platform name) was always overwritten by
    # this one, so it has been removed.
    os_version = payload.get("OSVersion")
    build_version = payload.get("BuildVersion")
    if os_version:
        d = dict(zip(('major', 'minor', 'patch'),
                     (int(s) for s in os_version.split('.'))))
        if build_version:
            d["build"] = build_version
        # derive the platform name from the hardware model
        hardware_model = system_info_d.get("hardware_model")
        if hardware_model:
            hardware_model = hardware_model.upper()
            if "IPOD" in hardware_model or "IPHONE" in hardware_model:
                d["name"] = Platform.iOS.value
            elif "IPAD" in hardware_model:
                if d["major"] >= 13:
                    d["name"] = Platform.iPadOS.value
                else:
                    d["name"] = Platform.iOS.value
            elif "TV" in hardware_model:
                d["name"] = Platform.tvOS.value
            else:
                # No watchOS
                d["name"] = Platform.macOS.value
        tree["os_version"] = d
    commit_machine_snapshot_and_trigger_events(tree)
    return tree
def commit_inventory_query_result(self, snapshot):
    """Build a machine snapshot tree from the inventory query tables and
    commit it."""
    tree = {'source': {'module': self.ms.source.module,
                       'name': self.ms.source.name},
            'serial_number': self.machine_serial_number,
            'reference': self.ms.reference,
            'public_ip_address': self.ip}

    def clean_dict(d):
        # drop None and empty string values in place
        for key in [k for k, v in d.items() if v is None or v == ""]:
            del d[key]
        return d

    if self.business_unit:
        tree['business_unit'] = self.business_unit.serialize()
    for row in snapshot:
        table_name = row.pop('table_name')
        if table_name == 'os_version':
            os_version = clean_dict(row)
            if os_version:
                tree['os_version'] = os_version
        elif table_name == 'system_info':
            system_info = clean_dict(row)
            if system_info:
                tree['system_info'] = system_info
        elif table_name == 'network_interface':
            network_interface = clean_dict(row)
            if network_interface:
                network_interfaces = tree.setdefault('network_interfaces', [])
                if network_interface in network_interfaces:
                    logger.warning("Duplicated network interface")
                else:
                    network_interfaces.append(network_interface)
        elif table_name == 'apps':
            bundle_path = row.pop('bundle_path')
            osx_app = clean_dict(row)
            if osx_app and bundle_path:
                osx_app_instance = {'app': osx_app, 'bundle_path': bundle_path}
                osx_app_instances = tree.setdefault('osx_app_instances', [])
                if osx_app_instance in osx_app_instances:
                    logger.warning("Duplicated osx app instance")
                else:
                    osx_app_instances.append(osx_app_instance)
        elif table_name == 'deb_packages':
            deb_package = clean_dict(row)
            if deb_package:
                deb_packages = tree.setdefault('deb_packages', [])
                if deb_package in deb_packages:
                    logger.warning("Duplicated deb package")
                else:
                    deb_packages.append(deb_package)
    commit_machine_snapshot_and_trigger_events(tree)
def do_post(self, data):
    """Process a Munki postflight request.

    Commits the machine snapshot, synchronizes the managed installs with
    the report data, updates the MunkiState row and posts the request
    and report events. Returns an empty response dict.
    """
    # lock enrolled machine
    # NOTE(review): this queryset is lazy and never evaluated, so no
    # SELECT ... FOR UPDATE is actually issued and no row lock is taken.
    # Confirm intent — it probably needs to be evaluated (e.g. wrapped in
    # tuple()) inside an atomic block.
    EnrolledMachine.objects.select_for_update().filter(serial_number=self.machine_serial_number)
    # commit machine snapshot
    ms_tree = data['machine_snapshot']
    ms_tree['source'] = {'module': 'zentral.contrib.munki', 'name': 'Munki'}
    ms_tree['reference'] = ms_tree['serial_number']
    ms_tree['public_ip_address'] = self.ip
    if self.business_unit:
        ms_tree['business_unit'] = self.business_unit.serialize()
    prepare_ms_tree_certificates(ms_tree)
    extra_facts = ms_tree.pop("extra_facts", None)
    if isinstance(extra_facts, dict):
        # NUL characters would break the DB storage of the facts
        ms_tree["extra_facts"] = remove_null_character(extra_facts)
    ms = commit_machine_snapshot_and_trigger_events(ms_tree)
    if not ms:
        raise RuntimeError("Could not commit machine snapshot")
    # delete all managed installs if last seen report not found
    # which is a good indicator that the machine has been wiped
    last_seen_report_found = data.get("last_seen_report_found")
    if last_seen_report_found is not None and not last_seen_report_found:
        ManagedInstall.objects.filter(machine_serial_number=self.machine_serial_number).delete()
    # prepare reports
    reports = []
    report_count = event_count = 0
    for r in data.pop('reports'):
        report_count += 1
        event_count += len(r.get("events", []))
        # (start_time, end_time, report) tuples — sortable chronologically
        reports.append((parser.parse(r.pop('start_time')),
                        parser.parse(r.pop('end_time')),
                        r))
    reports.sort()
    munki_request_event_kwargs = {
        "request_type": "postflight",
        "enrollment": {"pk": self.enrollment.pk},
        "report_count": report_count,
        "event_count": event_count,
    }
    if last_seen_report_found is not None:
        munki_request_event_kwargs["last_seen_report_found"] = last_seen_report_found
    # update machine managed installs
    managed_installs = data.get("managed_installs")
    if managed_installs is not None:
        munki_request_event_kwargs["managed_installs"] = True
        munki_request_event_kwargs["managed_install_count"] = len(managed_installs)
        # update managed installs using the complete list
        incident_updates = apply_managed_installs(
            self.machine_serial_number, managed_installs,
            self.enrollment.configuration)
        # incident updates are attached to the munki request event
        if incident_updates:
            munki_request_event_kwargs["incident_updates"] = incident_updates
    else:
        munki_request_event_kwargs["managed_installs"] = False
        # update managed installs using the install and removal events in the reports
        for _, _, report in reports:
            for created_at, event in report.get("events", []):
                # time
                event_time = parser.parse(created_at)
                if is_aware(event_time):
                    event_time = make_naive(event_time)
                for incident_update in update_managed_install_with_event(
                        self.machine_serial_number, event, event_time,
                        self.enrollment.configuration):
                    # incident updates are attached to each munki event
                    event.setdefault("incident_updates", []).append(incident_update)
    # update machine munki state
    update_dict = {'user_agent': self.user_agent, 'ip': self.ip}
    if managed_installs is not None:
        update_dict["last_managed_installs_sync"] = datetime.utcnow()
    if reports:
        # reports are sorted: keep the info from the most recent one
        start_time, end_time, report = reports[-1]
        update_dict.update({'munki_version': report.get('munki_version', None),
                            'sha1sum': report['sha1sum'],
                            'run_type': report['run_type'],
                            'start_time': start_time,
                            'end_time': end_time})
    MunkiState.objects.update_or_create(machine_serial_number=self.machine_serial_number,
                                        defaults=update_dict)
    # events
    post_munki_request_event(self.machine_serial_number,
                             self.user_agent, self.ip,
                             **munki_request_event_kwargs)
    post_munki_events(self.machine_serial_number,
                      self.user_agent, self.ip,
                      (r for _, _, r in reports))
    return {}