def settings_page(request):
    """Render the server settings page with the current retention and
    send-data values."""
    retention_days = utils.get_setting('historical_retention')
    return render(
        request,
        'server/settings.html',
        {
            'user': request.user,
            'request': request,
            'historical_setting_form': forms.SettingsHistoricalDataForm(
                initial={'days': retention_days}),
            'senddata_setting': utils.get_setting('send_data'),
        })
def machine_detail_facter(request, machine_id, **kwargs):
    """Render the Facter fact table for a machine.

    Fact names listed in ``settings.EXCLUDED_FACTS`` are omitted.
    """
    machine = kwargs['instance']
    table_data = []
    if machine.facts.count() != 0:
        facts = machine.facts.all()
        # Exclude every configured fact name with a single __in lookup
        # instead of one .exclude() call per name (consistent with the
        # machine_detail_facts view).
        if settings.EXCLUDED_FACTS:
            facts = facts.exclude(fact_name__in=settings.EXCLUDED_FACTS)
        table_data = [
            {'key': fact.fact_name, 'value': fact.fact_data} for fact in facts]
    context = {
        'user': request.user,
        'machine': machine,
        'table_data': table_data,
        'title': 'Facter data for %s' % machine.hostname,
        'key_header': 'Fact',
        'value_header': 'Data',
        'page_length': utils.get_setting('datatable_page_length')}
    return render(request, 'server/machine_detail_table.html', context)
def machine_detail_conditions(request, machine_id, **kwargs):
    """Render the Munki conditions table for a machine.

    Condition names listed in ``settings.EXCLUDED_CONDITIONS`` are omitted.
    """
    machine = kwargs['instance']
    table_data = []
    if machine.conditions.count() != 0:
        conditions = machine.conditions.all()
        # Exclude every configured condition name with a single __in lookup
        # instead of one .exclude() call per name.
        if settings.EXCLUDED_CONDITIONS:
            conditions = conditions.exclude(
                condition_name__in=settings.EXCLUDED_CONDITIONS)
        table_data = [
            {'key': condition.condition_name, 'value': condition.condition_data}
            for condition in conditions]
    context = {
        'user': request.user,
        'machine': machine,
        'table_data': table_data,
        'title': 'Munki conditions data for %s' % machine.hostname,
        'key_header': 'Condition',
        'value_header': 'Data',
        'page_length': utils.get_setting('datatable_page_length')}
    return render(request, 'server/machine_detail_table.html', context)
def index(request):
    """Render the main dashboard.

    Promotes a lone first user to global admin, redirects non-GA users
    with zero or one business unit, and otherwise renders the dashboard
    with plugin placeholder markup and report names.
    """
    # Get the current user's Business Units
    user = request.user
    # Count the number of users. If there is only one, they need to be made a GA
    if User.objects.count() == 1:
        # The first user created by syncdb won't have a profile. If there isn't
        # one, make sure they get one.
        try:
            profile = UserProfile.objects.get(user=user)
        except UserProfile.DoesNotExist:
            profile = UserProfile(user=user)
        profile.level = ProfileLevel.global_admin
        profile.save()
    user_is_ga = is_global_admin(user)
    if not user_is_ga:
        # user has many BU's display them all in a friendly manner
        if user.businessunit_set.count() == 0:
            # No business units at all: show the "no access" page.
            c = {
                'user': request.user,
            }
            return render(request, 'server/no_access.html', c)
        if user.businessunit_set.count() == 1:
            # user only has one BU, redirect to it
            return redirect('bu_dashboard', bu_id=user.businessunit_set.all()[0].id)
    # Load in the default plugins if needed
    utils.load_default_plugins()
    plugins = sal.plugin.PluginManager().get_all_plugins()
    reports = utils.get_report_names(plugins)
    output = utils.get_plugin_placeholder_markup(plugins)
    # If the user is GA level, and hasn't decided on a data sending
    # choice, template will reflect this.
    data_choice = False if (
        user_is_ga and utils.get_setting('send_data') is None) else True
    # get the user level - if they're a global admin, show all of the
    # machines. If not, show only the machines they have access to
    if user_is_ga:
        business_units = BusinessUnit.objects.all()
    else:
        business_units = user.businessunit_set.all()
    context = {
        'user': request.user,
        'business_units': business_units,
        'output': output,
        'data_setting_decided': data_choice,
        'reports': reports
    }
    # check_version() merges new-version availability keys into the context.
    context.update(utils.check_version())
    return render(request, 'server/index.html', context)
class Meta:
    # Datatable configuration for the machine list view.
    # Columns displayed, in order.
    columns = ['hostname', 'serial', 'last_checkin', 'console_user', 'install_count']
    # Column header labels shown to the user.
    # NOTE(review): the console_user label appears masked ('******') —
    # confirm this is intended rather than a placeholder.
    labels = {
        'hostname': 'Machine',
        'serial': 'Serial Number',
        'last_checkin': 'Last Checkin',
        'console_user': '******'}
    # Per-column render callbacks (method names on the view).
    processors = {'hostname': 'get_machine_link', 'last_checkin': 'format_date'}
    structure_template = 'datatableview/bootstrap_structure.html'
    # Rows per page, read from the Sal settings table.
    page_length = utils.get_setting('datatable_page_length')
def _get_status_levels(self):
    """Return message type values up to and including the configured level.

    The level comes from the 'MessagesPluginLevel' setting; an unknown
    value falls back to the highest severity (first) message type.
    """
    message_values = list(zip(*Message.MESSAGE_TYPES))[0]
    # Default to using only the highest severity message.
    # Normalize case once so the membership test and .index() agree;
    # previously a lowercase setting could pass the upper-cased membership
    # check and then raise ValueError in .index().
    status_setting = get_setting('MessagesPluginLevel', message_values[0]).upper()
    if status_setting not in message_values:
        status_setting = message_values[0]
    return message_values[:message_values.index(status_setting) + 1]
def update_notify_date(request, length='never'):
    """Record when the next new-version notification should occur."""
    # Don't notify about a new version until there is a new one.
    if not utils.check_version()['new_version_available']:
        return
    if isinstance(length, int):
        # Numeric lengths push the deadline forward by that many seconds.
        deadline = utils.get_setting('next_notify_date', time.time()) + length
    else:
        deadline = length
    utils.set_setting('next_notify_date', deadline)
def get_context(self, machines, **kwargs):
    """Build a template context of OS version counts grouped by OS family.

    Optionally collapses Chrome OS versions to their first three
    components when 'normalize_chromeos_versions' is enabled.
    """
    context = self.super_get_context(machines, **kwargs)
    # Remove invalid versions, then annotate with a count.
    os_info = (
        machines
        .exclude(operating_system__isnull=True)
        .exclude(operating_system='')
        .order_by('operating_system')
        .values('operating_system', 'os_family')
        .distinct()
        .annotate(count=Count('operating_system')))
    grouped = defaultdict(list)
    for version in os_info:
        grouped[OS_TABLE[version['os_family']]].append(version)
    if get_setting('normalize_chromeos_versions'):
        # Truncate Chrome OS versions to three components and merge the
        # counts of entries that collapse to the same version string.
        chrome_items = []
        for chrome_item in grouped['Chrome OS']:
            version_array = chrome_item['operating_system'].split('.')
            if len(version_array) <= 3:
                version_string = chrome_item['operating_system']
            else:
                version_string = '{}.{}.{}'.format(
                    version_array[0], version_array[1], version_array[2])
            found = False
            for item in chrome_items:
                if item['operating_system'] == version_string:
                    item['count'] = item['count'] + chrome_item['count']
                    found = True
                    break  # versions are unique in chrome_items; stop scanning
            if not found:
                chrome_items.append({
                    'operating_system': version_string,
                    'os_family': 'ChromeOS',
                    'count': chrome_item['count']})
        grouped['Chrome OS'] = chrome_items
    # you and your lambdas @sheacraig...
    os_key = lambda x: LooseVersion(x["operating_system"])  # noqa: E731
    output = [
        (key, sorted(grouped[key], key=os_key, reverse=True))
        for key in OS_TABLE.values()]
    context['os_info'] = output
    return context
def machine_list(request, plugin_name, data, group_type='all', group_id=None):
    """Render the plugin machine-list page for the given group scope."""
    page_length = utils.get_setting('datatable_page_length')
    return render(request, 'server/overview_list_all.html', {
        'group_type': group_type,
        'group_id': group_id,
        'plugin_name': plugin_name,
        'request': request,
        'data': data,
        'page_length': page_length})
def index(request):
    """Render the main dashboard page.

    Handles first-user global-admin promotion, redirects non-GA users
    with zero or one business unit, then renders the dashboard.
    """
    # Get the current user's Business Units
    user = request.user
    # Count the number of users. If there is only one, they need to be made a GA
    if User.objects.count() == 1:
        # The first user created by syncdb won't have a profile. If there isn't
        # one, make sure they get one.
        try:
            profile = UserProfile.objects.get(user=user)
        except UserProfile.DoesNotExist:
            profile = UserProfile(user=user)
        profile.level = ProfileLevel.global_admin
        profile.save()
    user_is_ga = is_global_admin(user)
    if not user_is_ga:
        # user has many BU's display them all in a friendly manner
        if user.businessunit_set.count() == 0:
            # No business units: show the "no access" page.
            c = {'user': request.user, }
            return render(request, 'server/no_access.html', c)
        if user.businessunit_set.count() == 1:
            # user only has one BU, redirect to it
            return redirect('bu_dashboard', bu_id=user.businessunit_set.all()[0].id)
    # Load in the default plugins if needed
    utils.load_default_plugins()
    plugins = sal.plugin.PluginManager().get_all_plugins()
    reports = utils.get_report_names(plugins)
    output = utils.get_plugin_placeholder_markup(plugins)
    # If the user is GA level, and hasn't decided on a data sending
    # choice, template will reflect this.
    data_choice = False if (user_is_ga and utils.get_setting('send_data') is None) else True
    # get the user level - if they're a global admin, show all of the
    # machines. If not, show only the machines they have access to
    if user_is_ga:
        business_units = BusinessUnit.objects.all()
    else:
        business_units = user.businessunit_set.all()
    context = {
        'user': request.user,
        'business_units': business_units,
        'output': output,
        'data_setting_decided': data_choice,
        'reports': reports}
    # check_version() merges new-version availability keys into the context.
    context.update(utils.check_version())
    return render(request, 'server/index.html', context)
def get_context(self, machines, **kwargs):
    """Build a template context of OS version counts grouped by OS family.

    When 'normalize_chromeos_versions' is set, Chrome OS versions are
    collapsed to their first three components and their counts merged.
    """
    context = self.super_get_context(machines, **kwargs)
    # Remove invalid versions, then annotate with a count.
    os_info = (
        machines
        .exclude(operating_system__isnull=True)
        .exclude(operating_system='')
        .order_by('operating_system')
        .values('operating_system', 'os_family')
        .distinct()
        .annotate(count=Count('operating_system')))
    grouped = defaultdict(list)
    for version in os_info:
        grouped[OS_TABLE[version['os_family']]].append(version)
    if get_setting('normalize_chromeos_versions'):
        chrome_items = []
        for chrome_item in grouped['Chrome OS']:
            version_array = chrome_item['operating_system'].split('.')
            if len(version_array) <= 3:
                version_string = chrome_item['operating_system']
            else:
                version_string = '{}.{}.{}'.format(
                    version_array[0], version_array[1], version_array[2])
            found = False
            for item in chrome_items:
                if item['operating_system'] == version_string:
                    item['count'] = item['count'] + chrome_item['count']
                    found = True
                    break  # each version appears once; no need to keep scanning
            if not found:
                chrome_items.append({
                    'operating_system': version_string,
                    'os_family': 'ChromeOS',
                    'count': chrome_item['count']})
        grouped['Chrome OS'] = chrome_items
    # you and your lambdas @sheacraig...
    os_key = lambda x: LooseVersion(x["operating_system"])  # noqa: E731
    output = [
        (key, sorted(grouped[key], key=os_key, reverse=True))
        for key in OS_TABLE.values()]
    context['os_info'] = output
    return context
def get_queryset(self):
    """Return the app inventory queryset with crufty bundle IDs excluded."""
    queryset = self.filter_queryset_by_group(self.model.objects).distinct()
    crufty_bundles = []
    # The inventory can be configured to filter bundleids out of
    # results by setting the 'inventory_exclusion_pattern' setting
    # in the SalSettings table.
    # The value of this setting should be a regular expression using
    # the python re module's syntax. You may delimit multiple
    # patterns with the '|' operator, e.g.:
    # 'com\.[aA]dobe.*|com\.apple\..*'
    inventory_pattern = utils.get_setting('inventory_exclusion_pattern')
    if inventory_pattern:
        crufty_bundles.append(inventory_pattern)
    # By default, Sal will filter out the apps proxied through
    # VMWare and Parallels VMs. If you would like to disable this,
    # set the SalSetting 'filter_proxied_virtualization_apps' to
    # 'no' or 'false' (it's a string).
    if utils.get_setting('filter_proxied_virtualization_apps', True):
        # Virtualization proxied apps. Raw strings so the regex escapes
        # aren't interpreted as (invalid) string escape sequences.
        crufty_bundles.extend(
            [r"com\.vmware\.proxyApp\..*", r"com\.parallels\.winapp\..*"])
    # Apple apps that are not generally used by users; currently
    # unused, but here for reference.
    # apple_cruft_pattern = (r'com.apple.(?!iPhoto)(?!iWork)(?!Aperture)'
    #                        r'(?!iDVD)(?!garageband)(?!iMovieApp)(?!Server)(?!dt\.Xcode).*')
    crufty_pattern = '|'.join(crufty_bundles)
    if crufty_pattern:
        queryset = queryset.exclude(bundleid__regex=crufty_pattern)
    return queryset
def get_queryset(self):
    """Return the app inventory queryset with crufty bundle IDs excluded."""
    queryset = self.filter_queryset_by_group(self.model.objects).distinct()
    crufty_bundles = []
    # The inventory can be configured to filter bundleids out of
    # results by setting the 'inventory_exclusion_pattern' setting
    # in the SalSettings table.
    # The value of this setting should be a regular expression using
    # the python re module's syntax. You may delimit multiple
    # patterns with the '|' operator, e.g.:
    # 'com\.[aA]dobe.*|com\.apple\..*'
    inventory_pattern = utils.get_setting('inventory_exclusion_pattern')
    if inventory_pattern:
        crufty_bundles.append(inventory_pattern)
    # By default, Sal will filter out the apps proxied through
    # VMWare and Parallels VMs. If you would like to disable this,
    # set the SalSetting 'filter_proxied_virtualization_apps' to
    # 'no' or 'false' (it's a string).
    if utils.get_setting('filter_proxied_virtualization_apps', True):
        # Virtualization proxied apps
        crufty_bundles.extend([r"com\.vmware\.proxyApp\..*", r"com\.parallels\.winapp\..*"])
    # Apple apps that are not generally used by users; currently
    # unused, but here for reference.
    # apple_cruft_pattern = (r'com.apple.(?!iPhoto)(?!iWork)(?!Aperture)'
    #                       r'(?!iDVD)(?!garageband)(?!iMovieApp)(?!Server)(?!dt\.Xcode).*')
    # Join everything into one regex so the exclusion is a single SQL clause.
    crufty_pattern = '|'.join(crufty_bundles)
    if crufty_pattern:
        queryset = queryset.exclude(bundleid__regex=crufty_pattern)
    return queryset
def filter(self, machines, data):
    """Narrow `machines` to one OS; `data` is 'os_family&version'."""
    pieces = data.split('&')
    if len(pieces) != 2:
        # Malformed data string: signal "no match" to the caller.
        return None, None
    os_family, operating_system = pieces
    if os_family == 'ChromeOS' and get_setting('normalize_chromeos_versions'):
        # Normalized Chrome OS entries are truncated versions, so match
        # by prefix rather than exact equality.
        machines = machines.filter(
            operating_system__startswith=operating_system, os_family=os_family)
    else:
        machines = machines.filter(
            operating_system=operating_system, os_family=os_family)
    title = 'Machines running {} {}'.format(OS_TABLE[os_family], operating_system)
    return machines, title
def machine_list(request, plugin_name, data, group_type='all', group_id=None):
    """Render the list of machines matched by a plugin filter for `data`."""
    plugin_object = process_plugin(request, plugin_name, group_type, group_id)
    # Plugin will raise 404 if bad `data` is passed.
    machines, title = plugin_object.filter_machines(Machine.objects.none(), data)
    context = {
        'group_type': group_type,
        'group_id': group_id,
        'plugin_name': plugin_name,
        'machines': machines,
        'title': title,
        'request': request,
        'data': data,
        'page_length': utils.get_setting('datatable_page_length')}
    return render(request, 'server/overview_list_all.html', context)
def machine_list(request, plugin_name, data, group_type='all', group_id=None):
    """Render the list of machines a plugin matches for the given data."""
    plugin_object = process_plugin(plugin_name, group_type, group_id)
    # Plugin will raise 404 if bad `data` is passed.
    machines, title = plugin_object.filter_machines(Machine.objects.none(), data)
    return render(request, 'server/overview_list_all.html', {
        'group_type': group_type,
        'group_id': group_id,
        'plugin_name': plugin_name,
        'machines': machines,
        'title': title,
        'request': request,
        'data': data,
        'page_length': utils.get_setting('datatable_page_length')})
def get_context(self, queryset, **kwargs):
    """Add ssh/vnc URLs (from the machine's ipv4_address condition) to
    the context."""
    context = self.super_get_context(queryset, **kwargs)
    ip_address = ""
    if queryset.conditions.count() > 0:
        try:
            ip_addresses = queryset.conditions.get(
                condition_name="ipv4_address").condition_data
            # Machines may have multiple IPs. Just use the first.
            ip_address = ip_addresses.split(",")[0]
        except Machine.DoesNotExist:
            pass
    # Strip any user-supplied '@' and add our own separator only when an
    # account is configured; the old code dropped the '@' entirely, which
    # produced broken URLs like ssh://user10.0.0.1.
    account = get_setting(name='ssh_account', default='').replace('@', '')
    context['ssh_account'] = account
    delimiter = '@' if account else ''
    context["ssh_url"] = "ssh://{}{}{}".format(account, delimiter, ip_address)
    context["vnc_url"] = "vnc://{}{}{}".format(account, delimiter, ip_address)
    return context
def get_context(self, queryset, **kwargs):
    """Add aggregated message counts at or above the configured level."""
    context = self.super_get_context(queryset, **kwargs)
    raw_threshold = get_setting('MessagesPluginThreshold', DEFAULT_THRESHOLD)
    try:
        count_threshold = int(raw_threshold)
    except ValueError:
        # Non-numeric setting value: fall back to the default.
        count_threshold = DEFAULT_THRESHOLD
    relevant = Message.objects.filter(
        machine__in=queryset, message_type__in=self._get_status_levels())
    context['data'] = (
        relevant
        .values('text', 'message_type')
        .annotate(count=Count('text'))
        .filter(count__gte=count_threshold)
        .order_by('message_type', 'count'))
    return context
def machine_detail_facts(request, machine_id, management_source, **kwargs):
    """Render the fact table for one machine and management source."""
    machine = kwargs['instance']
    facts = None
    if machine.facts.count() != 0:
        facts = machine.facts.filter(management_source__name=management_source)
        # Drop facts the deployment has configured to hide.
        if settings.EXCLUDED_FACTS:
            facts = facts.exclude(fact_name__in=settings.EXCLUDED_FACTS)
    context = {
        'user': request.user,
        'machine': machine,
        'fact_sources': get_fact_sources(machine),
        'table_data': facts,
        'title': f'{management_source} Facts for {machine.hostname}',
        'page_length': utils.get_setting('datatable_page_length')}
    return render(request, 'server/machine_detail_facts.html', context)
def get_context(self, queryset, **kwargs):
    """Add ssh/vnc connection URLs built from the ipv4_address condition."""
    context = self.super_get_context(queryset, **kwargs)
    ip_address = ""
    if queryset.conditions.count() > 0:
        try:
            condition = queryset.conditions.get(condition_name="ipv4_address")
            # Machines may have multiple IPs. Just use the first.
            ip_address = condition.condition_data.split(",")[0]
        except Machine.DoesNotExist:
            pass
    account = get_setting(name='ssh_account', default='').strip()
    context['ssh_account'] = account
    # Only insert the '@' separator when an account is configured.
    delimiter = '@' if account else ''
    context["ssh_url"] = "ssh://{}{}{}".format(account, delimiter, ip_address)
    context["vnc_url"] = "vnc://{}{}{}".format(account, delimiter, ip_address)
    return context
def get_context(self, machine, **kwargs):
    """Query the configured Crypt server for this machine's escrow status
    and return context for the Crypt plugin template."""
    context = defaultdict(str)
    context['title'] = self.description
    crypt_url = utils.get_setting('crypt_url', None)
    machine_url = crypt_url
    if crypt_url:
        # Drop any trailing whitespace/newline from the stored URL.
        crypt_url = crypt_url.rstrip()
    if crypt_url:
        try:
            # Custom CA bundle for the Crypt server, when configured.
            verify = settings.ROOT_CA
        except AttributeError:
            verify = True
        request_url = '{}/verify/{}/recovery_key/'.format(
            crypt_url, machine.serial)
        output = None
        machine_url = crypt_url
        try:
            response = requests.get(request_url, verify=verify)
            if response.status_code == requests.codes.ok:
                output = response.json()
                # Have template link to machine info page rather
                # than Crypt root.
                machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
        except RequestException:
            # Either there was an error or the machine hasn't been
            # seen.
            pass
        if output:
            context['escrowed'] = output['escrowed']
            if output['escrowed']:
                context['date_escrowed'] = parse_datetime(
                    output['date_escrowed'])
    context['crypt_url'] = machine_url
    return context
def get_context(self, queryset, **kwargs):
    """Add ssh/vnc connection URLs built from the machine's ipv4_address fact."""
    context = self.super_get_context(queryset, **kwargs)
    ip_address = ""
    if queryset.facts.count() > 0:
        try:
            fact = queryset.facts.get(fact_name="ipv4_address")
            # Machines may have multiple IPs. Just use the first.
            ip_address = fact.fact_data.split(",")[0]
        except Fact.DoesNotExist:
            pass
    account = get_setting(name='ssh_account', default='').strip()
    context['ssh_account'] = account
    # Only insert the '@' separator when an account is configured.
    separator = '@' if account else ''
    context["ssh_url"] = "ssh://{}{}{}".format(account, separator, ip_address)
    context["vnc_url"] = "vnc://{}{}{}".format(account, separator, ip_address)
    return context
def get_context(self, machine, **kwargs):
    """Fetch this machine's escrow status from the configured Crypt server
    and build the context for the Crypt plugin template."""
    context = defaultdict(str)
    context['title'] = self.description
    crypt_url = utils.get_setting('crypt_url', None)
    machine_url = crypt_url
    if crypt_url:
        # Drop any trailing whitespace/newline from the stored URL.
        crypt_url = crypt_url.rstrip()
    if crypt_url:
        try:
            # Custom CA bundle for the Crypt server, when configured.
            verify = settings.ROOT_CA
        except AttributeError:
            verify = True
        request_url = '{}/verify/{}/recovery_key/'.format(crypt_url, machine.serial)
        output = None
        machine_url = crypt_url
        try:
            response = requests.get(request_url, verify=verify)
            if response.status_code == requests.codes.ok:
                output = response.json()
                # Have template link to machine info page rather
                # than Crypt root.
                machine_url = '{}/info/{}'.format(crypt_url, machine.serial)
        except RequestException:
            # Either there was an error or the machine hasn't been
            # seen.
            pass
        if output:
            context['escrowed'] = output['escrowed']
            if output['escrowed']:
                context['date_escrowed'] = parse_datetime(output['date_escrowed'])
    context['crypt_url'] = machine_url
    return context
def checkin(request):
    """Process a Sal client checkin POST.

    Creates or looks up the Machine by serial, updates its metadata from
    the submitted report, runs plugin/fact/condition processing, and
    optionally phones home an install-count report.
    """
    data = request.POST
    # Take out some of the weird junk VMware puts in. Keep an eye out in case
    # Apple actually uses these:
    serial = data.get('serial', '').upper().translate(SERIAL_TRANSLATE)
    # Are we using Sal for some sort of inventory (like, I don't know, Puppet?)
    if utils.get_django_setting('ADD_NEW_MACHINES', True):
        if serial:
            try:
                machine = Machine.objects.get(serial=serial)
            except Machine.DoesNotExist:
                # New machines are auto-created when the setting allows it.
                machine = Machine(serial=serial)
    else:
        # Auto-creation disabled: unknown serials are a 404.
        machine = get_object_or_404(Machine, serial=serial)
    machine_group_key = data.get('key')
    if machine_group_key in (None, 'None'):
        machine_group_key = utils.get_django_setting('DEFAULT_MACHINE_GROUP_KEY')
    machine.machine_group = get_object_or_404(MachineGroup, key=machine_group_key)
    machine.last_checkin = django.utils.timezone.now()
    machine.hostname = data.get('name', '<NO NAME>')
    machine.sal_version = data.get('sal_version')
    if utils.get_django_setting('DEPLOYED_ON_CHECKIN', True):
        machine.deployed = True
    if bool(data.get('broken_client', False)):
        # Broken clients get recorded and short-circuit the rest of checkin.
        machine.broken_client = True
        machine.save()
        return HttpResponse("Broken Client report submmitted for %s" % data.get('serial'))
    report = None
    # Find the report in the submitted data. It could be encoded
    # and/or compressed with base64 and bz2.
    for key in ('bz2report', 'base64report', 'base64bz2report'):
        if key in data:
            encoded_report = data[key]
            report = text_utils.decode_to_string(encoded_report, compression=key)
            break
    machine.report = report
    if not report:
        machine.activity = False
        machine.errors = machine.warnings = 0
        # NOTE(review): bails out without machine.save() and returns None
        # (not an HttpResponse) — confirm this is intended.
        return
    # NOTE(review): plistlib.readPlistFromString was deprecated in 3.4 and
    # removed in Python 3.9; plistlib.loads is the modern equivalent —
    # confirm the runtime Python version.
    report_data = plistlib.readPlistFromString(report)
    # Prefer the report's console user; fall back to the POSTed username.
    # '_mbsetupuser' (macOS Setup Assistant) is never a real user.
    if report_data.get('ConsoleUser') and report_data.get('ConsoleUser') != '_mbsetupuser':
        machine.console_user = report_data.get('ConsoleUser')
    elif data.get('username') and data.get('username') != '_mbsetupuser':
        machine.console_user = data.get('username')
    else:
        machine.console_user = None
    activity_keys = ('AppleUpdates', 'InstallResults', 'RemovalResults')
    machine.activity = any(report_data.get(s) for s in activity_keys)
    # Check errors and warnings.
    machine.errors = len(report_data.get("Errors", []))
    machine.warnings = len(report_data.get("Warnings", []))
    machine.puppet_version = report_data.get('Puppet_Version')
    machine.manifest = report_data.get('ManifestName')
    machine.munki_version = report_data.get('ManagedInstallVersion')
    puppet = report_data.get('Puppet', {})
    if 'time' in puppet:
        last_run_epoch = float(puppet['time']['last_run'])
        machine.last_puppet_run = datetime.fromtimestamp(last_run_epoch, tz=pytz.UTC)
    if 'events' in puppet:
        machine.puppet_errors = puppet['events']['failure']
    # Handle gosal submissions slightly differently from others.
    machine.os_family = (
        report_data['OSFamily'] if 'OSFamily' in report_data else report_data.get('os_family'))
    machine_info = report_data.get('MachineInfo', {})
    if 'os_vers' in machine_info:
        machine.operating_system = machine_info['os_vers']
        # macOS major OS updates don't have a minor version, so add one.
        if len(machine.operating_system) <= 4 and machine.os_family == 'Darwin':
            machine.operating_system = machine.operating_system + '.0'
    else:
        # Handle gosal and missing os_vers cases.
        machine.operating_system = machine_info.get('OSVers')
    # TODO: These should be a number type.
    # TODO: Cleanup all of the casting to str if we make a number.
    machine.hd_space = report_data.get('AvailableDiskSpace', '0')
    machine.hd_total = data.get('disk_size', '0')
    space = float(machine.hd_space)
    total = float(machine.hd_total)
    if space == float(0) or total == float(0):
        machine.hd_percent = '0'
    else:
        try:
            machine.hd_percent = str(int((total - space) / total * 100))
        except ZeroDivisionError:
            machine.hd_percent = '0'
    # Get macOS System Profiler hardware info.
    # Older versions use `HardwareInfo` key, so start there.
    hwinfo = machine_info.get('HardwareInfo', {})
    if not hwinfo:
        for profile in machine_info.get('SystemProfile', []):
            if profile['_dataType'] == 'SPHardwareDataType':
                hwinfo = profile._items[0]
                break
    if hwinfo:
        # Old-style reports use CamelCase keys; new-style use snake_case.
        key_style = 'old' if 'MachineModel' in hwinfo else 'new'
        machine.machine_model = hwinfo.get(MACHINE_KEYS['machine_model'][key_style])
        machine.machine_model_friendly = machine_info.get('machine_model_friendly', '')
        machine.cpu_type = hwinfo.get(MACHINE_KEYS['cpu_type'][key_style])
        machine.cpu_speed = hwinfo.get(MACHINE_KEYS['cpu_speed'][key_style])
        machine.memory = hwinfo.get(MACHINE_KEYS['memory'][key_style])
        machine.memory_kb = process_memory(machine)
    # if not machine.machine_model_friendly:
    #     try:
    #         machine.machine_model_friendly = utils.friendly_machine_model(machine)
    #     except Exception:
    #         machine.machine_model_friendly = machine.machine_model
    machine.save()
    # Anything recorded before `datelimit` is past the retention window.
    historical_days = utils.get_setting('historical_retention')
    now = django.utils.timezone.now()
    datelimit = now - timedelta(days=historical_days)
    # Process plugin scripts.
    # Clear out too-old plugin script submissions first.
    PluginScriptSubmission.objects.filter(recorded__lt=datelimit).delete()
    utils.process_plugin_script(report_data.get('Plugin_Results', []), machine)
    process_managed_items(machine, report_data, data.get('uuid'), now, datelimit)
    process_facts(machine, report_data, datelimit)
    process_conditions(machine, report_data)
    utils.run_plugin_processing(machine, report_data)
    if utils.get_setting('send_data') in (None, True):
        # If setting is None, it hasn't been configured yet; assume True
        utils.send_report()
    return HttpResponse("Sal report submmitted for %s" % data.get('name'))
def checkin(request):
    """Process a Sal client check-in POST and update its Machine record.

    Expects form-encoded POST data containing at least a machine-group
    ``key`` and a machine ``serial``, plus optionally a (bz2/base64)
    Munki report.  Updates hardware info, pending/installed/Apple
    updates, Facter facts and Munki conditions, then runs plugin
    processing and (optionally) phones home usage data.

    Returns an acknowledgement HttpResponse, 404 when the machine group
    (or, with ADD_NEW_MACHINES disabled, the machine) is unknown, or
    HttpResponseNotFound for non-POST requests.
    """
    if request.method != 'POST':
        # NOTE(review): Python 2 print statement; this module predates py3.
        print 'not post data'
        return HttpResponseNotFound('No POST data sent')
    data = request.POST
    key = data.get('key')
    uuid = data.get('uuid')
    serial = data.get('serial')
    # NOTE(review): raises AttributeError when 'serial' is absent (None).
    serial = serial.upper()
    broken_client = data.get('broken_client', False)
    # Take out some of the weird junk VMware puts in. Keep an eye out in case
    # Apple actually uses these:
    serial = serial.replace('/', '')
    serial = serial.replace('+', '')
    # Are we using Sal for some sort of inventory (like, I don't know, Puppet?)
    try:
        add_new_machines = settings.ADD_NEW_MACHINES
    except Exception:
        # Setting absent: default to auto-creating machines on first check-in.
        add_new_machines = True
    if add_new_machines:
        # Look for the serial number - if it doesn't exist, create one.
        if serial:
            try:
                machine = Machine.objects.get(serial=serial)
            except Machine.DoesNotExist:
                machine = Machine(serial=serial)
        # NOTE(review): if serial is empty here, `machine` is never bound and
        # the code below raises UnboundLocalError -- confirm clients always
        # send a serial.
    else:
        machine = get_object_or_404(Machine, serial=serial)
    try:
        deployed_on_checkin = settings.DEPLOYED_ON_CHECKIN
    except Exception:
        deployed_on_checkin = True
    if key is None or key == 'None':
        # No key submitted; fall back to the configured default group, if any.
        try:
            key = settings.DEFAULT_MACHINE_GROUP_KEY
        except Exception:
            pass
    machine_group = get_object_or_404(MachineGroup, key=key)
    machine.machine_group = machine_group
    machine.last_checkin = django.utils.timezone.now()
    if bool(broken_client):
        # Client could not produce a report; record the breakage and bail
        # before any report parsing.  (Typo "submmitted" is a runtime string;
        # left as-is.)
        machine.broken_client = True
        machine.save()
        return HttpResponse("Broken Client report submmitted for %s" % data.get('serial'))
    else:
        machine.broken_client = False
    historical_days = utils.get_setting('historical_retention')
    machine.hostname = data.get('name', '<NO NAME>')
    if 'username' in data:
        # Ignore macOS's setup-assistant pseudo-user.
        if data.get('username') != '_mbsetupuser':
            machine.console_user = data.get('username')
    if 'base64bz2report' in data:
        machine.update_report(data.get('base64bz2report'))
    if 'base64report' in data:
        machine.update_report(data.get('base64report'), 'base64')
    if 'sal_version' in data:
        machine.sal_version = data.get('sal_version')
    # Extract machine data from the stored report.
    report_data = machine.get_report()
    if 'Puppet_Version' in report_data:
        machine.puppet_version = report_data['Puppet_Version']
    if 'ManifestName' in report_data:
        manifest = report_data['ManifestName']
        machine.manifest = manifest
    if 'MachineInfo' in report_data:
        machine.operating_system = report_data['MachineInfo'].get(
            'os_vers', 'UNKNOWN')
        # some machines are reporting 10.9, some 10.9.0 - make them the same
        if len(machine.operating_system) <= 4:
            machine.operating_system = machine.operating_system + '.0'
        # if gosal is the sender look for OSVers key
        if 'OSVers' in report_data['MachineInfo']:
            machine.operating_system = report_data['MachineInfo'].get('OSVers')
    machine.hd_space = report_data.get('AvailableDiskSpace') or 0
    # NOTE(review): int(None) raises TypeError when 'disk_size' is missing;
    # the `or 0` only guards a 0 result, not an absent key -- confirm clients
    # always send disk_size.
    machine.hd_total = int(data.get('disk_size')) or 0
    if machine.hd_total == 0:
        # Avoid ZeroDivisionError on machines reporting no disk size.
        machine.hd_percent = 0
    else:
        machine.hd_percent = int(
            round(((float(machine.hd_total) - float(machine.hd_space)) /
                   float(machine.hd_total)) * 100))
    machine.munki_version = report_data.get('ManagedInstallVersion') or 0
    hwinfo = {}
    # macOS System Profiler: find the hardware data type entry.
    if 'SystemProfile' in report_data.get('MachineInfo', []):
        for profile in report_data['MachineInfo']['SystemProfile']:
            if profile['_dataType'] == 'SPHardwareDataType':
                # NOTE(review): attribute access on what is presumably a
                # plist dict; `profile['_items'][0]` looks intended -- verify.
                hwinfo = profile._items[0]
                break
    # Newer clients send HardwareInfo directly; it wins over SystemProfile.
    if 'HardwareInfo' in report_data.get('MachineInfo', []):
        hwinfo = report_data['MachineInfo']['HardwareInfo']
    if 'Puppet' in report_data:
        puppet = report_data.get('Puppet')
        if 'time' in puppet:
            machine.last_puppet_run = datetime.fromtimestamp(
                float(puppet['time']['last_run']))
        if 'events' in puppet:
            machine.puppet_errors = puppet['events']['failure']
    if hwinfo:
        # Set up vars for the hash keys we might get sent: old-style
        # ('MachineModel'...) vs new-style ('machine_model'...) key names.
        if 'MachineModel' in hwinfo:
            var_machine_model = 'MachineModel'
            var_cpu_type = 'CPUType'
            var_cpu_speed = 'CurrentProcessorSpeed'
            var_memory = 'PhysicalMemory'
        else:
            var_machine_model = 'machine_model'
            var_cpu_type = 'cpu_type'
            var_cpu_speed = 'current_processor_speed'
            var_memory = 'physical_memory'
        machine.machine_model = hwinfo.get(var_machine_model)
        machine.cpu_type = hwinfo.get(var_cpu_type)
        machine.cpu_speed = hwinfo.get(var_cpu_speed)
        machine.memory = hwinfo.get(var_memory)
        # Normalize the human-readable memory string (e.g. "8 GB") to KB.
        # NOTE(review): subscripting None raises TypeError if the memory key
        # is absent from hwinfo -- confirm it is always present.
        if hwinfo.get(var_memory)[-2:] == 'KB':
            machine.memory_kb = int(hwinfo.get(var_memory)[:-3])
        if hwinfo.get(var_memory)[-2:] == 'MB':
            memory_mb = float(hwinfo.get(var_memory)[:-3])
            machine.memory_kb = int(memory_mb * 1024)
        if hwinfo.get(var_memory)[-2:] == 'GB':
            memory_gb = float(hwinfo.get(var_memory)[:-3])
            machine.memory_kb = int(memory_gb * 1024 * 1024)
        if hwinfo.get(var_memory)[-2:] == 'TB':
            memory_tb = float(hwinfo.get(var_memory)[:-3])
            machine.memory_kb = int(memory_tb * 1024 * 1024 * 1024)
    if 'os_family' in report_data:
        machine.os_family = report_data['os_family']
    # Support gosal's strict (CamelCase) structure.
    if 'OSFamily' in report_data:
        machine.os_family = report_data['OSFamily']
    if not machine.machine_model_friendly:
        try:
            machine.machine_model_friendly = utils.friendly_machine_model(
                machine)
        except Exception:
            # Best effort: fall back to the raw model identifier.
            machine.machine_model_friendly = machine.machine_model
    if deployed_on_checkin is True:
        machine.deployed = True
    machine.save()
    # If Plugin_Results are in the report, handle them; first expire plugin
    # script submissions older than the retention window.
    try:
        datelimit = django.utils.timezone.now() - timedelta(
            days=historical_days)
        PluginScriptSubmission.objects.filter(recorded__lt=datelimit).delete()
    except Exception:
        # Deliberate best-effort: retention cleanup must not fail a check-in.
        pass
    if 'Plugin_Results' in report_data:
        utils.process_plugin_script(report_data.get('Plugin_Results'), machine)
    # Remove existing PendingUpdates for the machine.
    machine.pending_updates.all().delete()
    now = django.utils.timezone.now()
    if 'ItemsToInstall' in report_data:
        # On Postgres, batch creations and bulk_create at the end; otherwise
        # save each row individually.
        pending_update_to_save = []
        update_history_item_to_save = []
        for update in report_data.get('ItemsToInstall'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update['version_to_install'])
            if version:
                pending_update = PendingUpdate(machine=machine,
                                               display_name=display_name,
                                               update_version=version,
                                               update=update_name)
                if IS_POSTGRES:
                    pending_update_to_save.append(pending_update)
                else:
                    pending_update.save()
            # Record these pending installs in the UpdateHistory model.
            try:
                update_history = UpdateHistory.objects.get(
                    name=update_name, version=version, machine=machine,
                    update_type='third_party')
            except UpdateHistory.DoesNotExist:
                update_history = UpdateHistory(name=update_name,
                                               version=version,
                                               machine=machine,
                                               update_type='third_party')
                update_history.save()
            if not update_history.pending_recorded:
                update_history_item = UpdateHistoryItem(
                    update_history=update_history, status='pending',
                    recorded=now, uuid=uuid)
                update_history.pending_recorded = True
                update_history.save()
                if IS_POSTGRES:
                    update_history_item_to_save.append(update_history_item)
                else:
                    update_history_item.save()
        if IS_POSTGRES:
            PendingUpdate.objects.bulk_create(pending_update_to_save)
            UpdateHistoryItem.objects.bulk_create(update_history_item_to_save)
    machine.installed_updates.all().delete()
    if 'ManagedInstalls' in report_data:
        # Due to a quirk in how Munki 3 processes updates with dependencies,
        # it's possible to have multiple entries in the ManagedInstalls list
        # that share an update_name and installed_version. This causes an
        # IntegrityError in Django since (machine_id, update, update_version)
        # must be unique. Until/(unless!) this is addressed in Munki, we need
        # to be careful to not add multiple items with the same name and
        # version. We'll store each (update_name, version) combo as we see
        # them.
        seen_names_and_versions = []
        installed_updates_to_save = []
        for update in report_data.get('ManagedInstalls'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update.get('installed_version', 'UNKNOWN'))
            installed = update.get('installed')
            if (update_name, version) not in seen_names_and_versions:
                seen_names_and_versions.append((update_name, version))
                if (version != 'UNKNOWN' and version is not None and
                        len(version) != 0):
                    installed_update = InstalledUpdate(
                        machine=machine, display_name=display_name,
                        update_version=version, update=update_name,
                        installed=installed)
                    if IS_POSTGRES:
                        installed_updates_to_save.append(installed_update)
                    else:
                        installed_update.save()
        if IS_POSTGRES:
            InstalledUpdate.objects.bulk_create(installed_updates_to_save)
    # Remove existing PendingAppleUpdates for the machine.
    machine.pending_apple_updates.all().delete()
    if 'AppleUpdates' in report_data:
        for update in report_data.get('AppleUpdates'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update['version_to_install'])
            try:
                pending_update = PendingAppleUpdate.objects.get(
                    machine=machine, display_name=display_name,
                    update_version=version, update=update_name)
            except PendingAppleUpdate.DoesNotExist:
                pending_update = PendingAppleUpdate(machine=machine,
                                                    display_name=display_name,
                                                    update_version=version,
                                                    update=update_name)
                pending_update.save()
            # Record these pending Apple installs in the UpdateHistory model.
            try:
                update_history = UpdateHistory.objects.get(name=update_name,
                                                           version=version,
                                                           machine=machine,
                                                           update_type='apple')
            except UpdateHistory.DoesNotExist:
                update_history = UpdateHistory(name=update_name,
                                               version=version,
                                               machine=machine,
                                               update_type='apple')
                update_history.save()
            if not update_history.pending_recorded:
                update_history_item = UpdateHistoryItem(
                    update_history=update_history, status='pending',
                    recorded=now, uuid=uuid)
                update_history_item.save()
                update_history.pending_recorded = True
                update_history.save()
    # If Facter data is submitted, we need to first remove any existing facts
    # for this machine.
    if IS_POSTGRES:
        # On Postgres we can just dump them all and do a bulk create.
        if 'Facter' in report_data:
            # NOTE(review): `facts` holds delete()'s return value and is
            # never used.
            facts = machine.facts.all().delete()
            try:
                datelimit = django.utils.timezone.now() - timedelta(
                    days=historical_days)
                HistoricalFact.objects.filter(
                    fact_recorded__lt=datelimit).delete()
            except Exception:
                pass
            try:
                historical_facts = settings.HISTORICAL_FACTS
            except Exception:
                historical_facts = []
                pass
            facts_to_be_created = []
            historical_facts_to_be_created = []
            for fact_name, fact_data in report_data['Facter'].iteritems():
                # Skip facts matching any configured IGNORE_FACTS prefix.
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:
                        if fact_name.startswith(prefix):
                            skip = True
                if skip:
                    continue
                facts_to_be_created.append(
                    Fact(machine=machine, fact_data=fact_data,
                         fact_name=fact_name))
                if fact_name in historical_facts:
                    historical_facts_to_be_created.append(
                        HistoricalFact(machine=machine, fact_data=fact_data,
                                       fact_name=fact_name))
            Fact.objects.bulk_create(facts_to_be_created)
            if len(historical_facts_to_be_created) != 0:
                HistoricalFact.objects.bulk_create(
                    historical_facts_to_be_created)
    else:
        if 'Facter' in report_data:
            facts = machine.facts.all()
            for fact in facts:
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:
                        if fact.fact_name.startswith(prefix):
                            skip = True
                            fact.delete()
                            break
                # NOTE(review): this condition looks inverted -- `continue`
                # fires for facts that were NOT ignored, so the stale-fact
                # check below only runs for facts already deleted above.
                # `if skip: continue` seems more likely intended; confirm.
                if not skip:
                    continue
                found = False
                for fact_name, fact_data in report_data['Facter'].iteritems():
                    if fact.fact_name == fact_name:
                        found = True
                        break
                if not found:
                    fact.delete()
            # Delete old historical facts.
            try:
                datelimit = django.utils.timezone.now() - timedelta(
                    days=historical_days)
                HistoricalFact.objects.filter(
                    fact_recorded__lt=datelimit).delete()
            except Exception:
                pass
            try:
                historical_facts = settings.HISTORICAL_FACTS
            except Exception:
                historical_facts = []
                pass
            # Now we need to loop over the submitted facts and save them.
            facts = machine.facts.all()
            for fact_name, fact_data in report_data['Facter'].iteritems():
                if machine.os_family == 'Windows':
                    # We had a little trouble parsing out facts on Windows;
                    # clean up here.
                    if fact_name.startswith('value=>'):
                        fact_name = fact_name.replace('value=>', '', 1)
                # Does the fact exist already?
                found = False
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:
                        if fact_name.startswith(prefix):
                            skip = True
                            break
                if skip:
                    continue
                for fact in facts:
                    if fact_name == fact.fact_name:
                        # It exists; make sure it's got the right info.
                        found = True
                        if fact_data == fact.fact_data:
                            # It's right; break.
                            break
                        else:
                            fact.fact_data = fact_data
                            fact.save()
                            break
                if not found:
                    fact = Fact(machine=machine, fact_data=fact_data,
                                fact_name=fact_name)
                    fact.save()
                if fact_name in historical_facts:
                    # NOTE(review): datetime.now() is naive while the rest of
                    # the function uses django.utils.timezone.now() -- verify
                    # USE_TZ expectations.
                    fact = HistoricalFact(machine=machine,
                                          fact_name=fact_name,
                                          fact_data=fact_data,
                                          fact_recorded=datetime.now())
                    fact.save()
    if IS_POSTGRES:
        if 'Conditions' in report_data:
            machine.conditions.all().delete()
            conditions_to_be_created = []
            for condition_name, condition_data in report_data[
                    'Conditions'].iteritems():
                # Skip the conditions that come from facter.
                if 'Facter' in report_data and condition_name.startswith(
                        'facter_'):
                    continue
                condition_data = text_utils.stringify(condition_data)
                conditions_to_be_created.append(
                    Condition(machine=machine,
                              condition_name=condition_name,
                              condition_data=text_utils.safe_unicode(
                                  condition_data)))
            Condition.objects.bulk_create(conditions_to_be_created)
    else:
        if 'Conditions' in report_data:
            # First pass: delete conditions no longer present in the report.
            conditions = machine.conditions.all()
            for condition in conditions:
                found = False
                for condition_name, condition_data in report_data[
                        'Conditions'].iteritems():
                    if condition.condition_name == condition_name:
                        found = True
                        break
                if found is False:
                    condition.delete()
            # Second pass: create or update the submitted conditions.
            conditions = machine.conditions.all()
            for condition_name, condition_data in report_data[
                    'Conditions'].iteritems():
                # Skip the conditions that come from facter.
                if 'Facter' in report_data and condition_name.startswith(
                        'facter_'):
                    continue
                # If it's a list (more than one result), we're going to
                # concatenate it into one comma-separated string.
                condition_data = text_utils.stringify(condition_data)
                found = False
                for condition in conditions:
                    if condition_name == condition.condition_name:
                        # It exists; make sure it's got the right info.
                        found = True
                        if condition_data == condition.condition_data:
                            # It's right; break.
                            break
                        else:
                            condition.condition_data = condition_data
                            condition.save()
                            break
                if found is False:
                    condition = Condition(
                        machine=machine, condition_name=condition_name,
                        condition_data=text_utils.safe_unicode(condition_data))
                    condition.save()
    utils.run_plugin_processing(machine, report_data)
    if utils.get_setting('send_data') in (None, True):
        # If setting is None, it hasn't been configured yet; assume True.
        current_version = utils.send_report()
    else:
        current_version = utils.get_current_release_version_number()
    if current_version:
        utils.set_setting('current_version', current_version)
    # NOTE(review): "submmitted" typo is in the runtime response body; left
    # unchanged here.
    return HttpResponse("Sal report submmitted for %s" % data.get('name'))
class Meta:
    # datatableview configuration: column set, display labels, cell
    # processors, rendering template, and rows-per-page.
    columns = [
        'name',
        'bundleid',
        'bundlename',
        'install_count',
    ]
    labels = {
        'bundleid': 'Bundle ID',
        'bundlename': 'Bundle Name',
    }
    # Render the 'name' column as a link to the detail view.
    processors = {'name': 'link_to_detail'}
    structure_template = 'datatableview/bootstrap_structure.html'
    # Page size comes from the server-wide setting.
    page_length = utils.get_setting('datatable_page_length')