def get(self, request, *args, **kwargs):
    """Render an inventory report as a CSV download.

    Two modes, chosen by the ``pk`` query parameter:

    * ``pk == "0"`` (the default): one row per distinct application visible
      to the requesting user, with install counts.
    * ``pk == <application id>``: one row per machine that has that
      application installed, optionally narrowed by ``field_type`` /
      ``field_value`` query parameters (``path`` or ``version``).

    URL kwargs ``group_type`` / ``group_id`` scope the queryset to a
    business unit / machine group (``group_type == "all"`` means no
    group scoping). ``self.components`` is built up to name the
    resulting CSV file.

    Returns an HTTP response produced by ``self.render_to_csv``.
    """
    # Filter data by access level
    queryset = self.filter_queryset_by_group(self.model.objects)
    # Group information is in the URL path
    group_type = self.kwargs['group_type']
    group_id = self.kwargs['group_id']
    # App id and filters are queries
    application_id = self.request.GET.get('pk', '0')
    field_type = self.request.GET.get('field_type', 'all')
    field_value = self.request.GET.get('field_value', '')
    if application_id == "0":
        # All Applications.
        self.set_header(["Name", "BundleID", "BundleName", "Install Count"])
        self.components = ['application', 'list', 'for', group_type]
        if group_type != "all":
            self.components.append(group_id)
        if utils.is_postgres():
            # Postgres supports DISTINCT ON, letting the DB collapse to one
            # row per application; order_by() clears default ordering so
            # distinct("application") is valid.
            apps = [
                self.get_application_entry(item, queryset) for item in
                queryset.select_related("application").order_by().distinct("application")]
            data = apps
        else:
            # TODO: This is super slow. This probably shouldn't be
            # used except in testing, but it could be improved.
            # Deduplicate in Python via a set, then sort by the first
            # column (application name) to mimic the Postgres path.
            apps = {
                self.get_application_entry(item, queryset)
                for item in queryset.select_related("application")}
            data = sorted(apps, key=lambda x: x[0])
    else:
        # Inventory List for one application.
        self.set_header(["Hostname", "Serial Number", "Last Checkin", "Console User"])
        self.components = ["application", application_id, "for", group_type]
        if group_type != "all":
            self.components.append(group_id)
        if field_type != "all":
            # quote() makes the filter value safe for use in the filename.
            self.components.extend(["where", field_type, "is", quote(field_value)])
        queryset = queryset.filter(application=application_id)
        if field_type == "path":
            queryset = queryset.filter(path=field_value)
        elif field_type == "version":
            queryset = queryset.filter(version=field_value)
        data = [
            self.get_machine_entry(item, queryset)
            for item in queryset.select_related("machine")]
    return self.render_to_csv(data)
def _get_unique_items(self, details):
    """Use optimized DB methods for getting unique items if possible.

    Returns a two-tuple ``(versions, paths)`` of the distinct version
    strings and install paths present in the ``details`` queryset. On
    Postgres the deduplication is pushed into the database; elsewhere it
    is done in Python and versions are sorted by LooseVersion.
    """
    if not utils.is_postgres():
        # Non-Postgres backends lack DISTINCT ON; dedupe client-side.
        rows = details.values()
        versions = {row["version"] for row in rows}
        paths = {row["path"] for row in rows}
        # We need to sort the versions for non-Postgres.
        versions = sorted(versions, key=LooseVersion)
    else:
        versions = (details
                    .order_by("version")
                    .distinct("version")
                    .values_list("version", flat=True))
        paths = (details
                 .order_by("path")
                 .distinct("path")
                 .values_list("path", flat=True))
    return (versions, paths)
from sal.plugin import (Widget, ReportPlugin, OldPluginAdapter, PluginManager, DEPRECATED_PLUGIN_TYPES) from server import text_utils from server import utils from server.models import (Machine, Condition, Fact, HistoricalFact, MachineGroup, UpdateHistory, UpdateHistoryItem, InstalledUpdate, PendingAppleUpdate, PluginScriptSubmission, PendingUpdate, Plugin, Report, MachineDetailPlugin) if settings.DEBUG: import logging logging.basicConfig(level=logging.INFO) # The database probably isn't going to change while this is loaded. IS_POSTGRES = utils.is_postgres() IGNORED_CSV_FIELDS = ('id', 'machine_group', 'report', 'os_family') HISTORICAL_FACTS = utils.get_django_setting('HISTORICAL_FACTS', []) IGNORE_PREFIXES = utils.get_django_setting('IGNORE_FACTS', []) MACHINE_KEYS = { 'machine_model': {'old': 'MachineModel', 'new': 'machine_model'}, 'cpu_type': {'old': 'CPUType', 'new': 'cpu_type'}, 'cpu_speed': {'old': 'CurrentProcessorSpeed', 'new': 'current_processor_speed'}, 'memory': {'old': 'PhysicalMemory', 'new': 'physical_memory'}} MEMORY_EXPONENTS = {'KB': 0, 'MB': 1, 'GB': 2, 'TB': 3} # Build a translation table for serial numbers, to remove garbage # VMware puts in. SERIAL_TRANSLATE = {ord(c): None for c in '+/'} @login_required
from django.template.context_processors import csrf import sal.plugin from sal.decorators import (required_level, ProfileLevel, access_required, is_global_admin, ga_required) from server.forms import (BusinessUnitForm, EditUserBusinessUnitForm, EditBusinessUnitForm, MachineGroupForm, EditMachineGroupForm, NewMachineForm) from server.models import (BusinessUnit, MachineGroup, Machine, UserProfile, Report, Plugin, PluginScriptSubmission, PluginScriptRow, ManagedItem, Fact) from server.non_ui_views import process_plugin from server import utils # The database probably isn't going to change while this is loaded. IS_POSTGRES = utils.is_postgres() # Bootstrap button classes for managed item statuses. STATUSES = { 'PRESENT': 'btn-success', 'ABSENT': 'btn-danger', 'PENDING': 'btn-info', 'ERROR': 'btn-danger', 'UNKNOWN': 'btn-default' } logger = logging.getLogger(__name__) @login_required def index(request):
def submit_profiles(request):
    """Accept a POSTed profiles report and rebuild a machine's profile data.

    Expects ``serial`` plus either ``base64bz2profiles`` (bz2-compressed,
    base64-encoded plist) or ``base64profiles`` (base64-encoded plist) in
    the POST body. All existing Profile rows for the machine are deleted
    and recreated from the submission, then matching Payload rows are
    created for each stored profile.

    Returns 404 for non-POST requests or an unknown serial; otherwise a
    plain-text confirmation response.
    """
    if request.method != 'POST':
        return HttpResponseNotFound('No POST data sent')
    submission = request.POST
    serial = submission.get('serial').upper()
    machine = None
    if serial:
        try:
            machine = Machine.objects.get(serial=serial)
        except Machine.DoesNotExist:
            return HttpResponseNotFound('Serial Number not found')

    compression_type = 'base64bz2'
    # BUGFIX: initialize so a POST carrying neither profiles key does not
    # raise NameError at the `if compressed_profiles:` check below.
    compressed_profiles = None
    if 'base64bz2profiles' in submission:
        compressed_profiles = submission.get('base64bz2profiles')
    elif 'base64profiles' in submission:
        # BUGFIX: previously fetched 'base64bz2profiles' here, which is
        # absent in this branch, so plain-base64 submissions were dropped.
        compressed_profiles = submission.get('base64profiles')
        compression_type = 'base64'

    if compressed_profiles:
        # Form encoding turns '+' into spaces; undo that before decoding.
        compressed_profiles = compressed_profiles.replace(" ", "+")
        profiles_str = text_utils.decode_to_string(compressed_profiles, compression_type)
        try:
            profiles_list = plistlib.readPlistFromString(profiles_str)
        except Exception:
            # NOTE(review): on parse failure profiles_list is None, and the
            # membership test below will raise TypeError — confirm whether a
            # 4xx response is preferable here.
            profiles_list = None

        profiles_to_be_added = []
        # Full replace: drop every stored profile before re-adding.
        machine.profile_set.all().delete()
        if '_computerlevel' in profiles_list:
            profiles_list = profiles_list['_computerlevel']
        # Hoisted out of the loops; the DB backend cannot change mid-request.
        use_bulk = utils.is_postgres()
        for profile in profiles_list:
            parsed_date = dateutil.parser.parse(profile.get('ProfileInstallDate'))
            profile_item = Profile(
                machine=machine,
                identifier=profile.get('ProfileIdentifier', ''),
                display_name=profile.get('ProfileDisplayName', ''),
                description=profile.get('ProfileDescription', ''),
                organization=profile.get('ProfileOrganization', ''),
                uuid=profile.get('ProfileUUID', ''),
                verification_state=profile.get('ProfileVerificationState', ''),
                install_date=parsed_date)
            if use_bulk:
                profiles_to_be_added.append(profile_item)
            else:
                profile_item.save()
        if use_bulk:
            Profile.objects.bulk_create(profiles_to_be_added)

        # Re-read the stored profiles and attach payloads to each one by
        # matching (uuid, identifier) back to the submitted plist entries.
        stored_profiles = machine.profile_set.all()
        payloads_to_save = []
        for stored_profile in stored_profiles:
            uuid = stored_profile.uuid
            identifier = stored_profile.identifier
            for profile in profiles_list:
                profile_uuid = profile.get('ProfileUUID', '')
                profile_id = profile.get('ProfileIdentifier', '')
                if uuid == profile_uuid and identifier == profile_id:
                    payloads = profile.get('ProfileItems', [])
                    for payload in payloads:
                        payload_item = Payload(
                            profile=stored_profile,
                            identifier=payload.get('PayloadIdentifier', ''),
                            uuid=payload.get('PayloadUUID', ''),
                            payload_type=payload.get('PayloadType', ''))
                        if use_bulk:
                            payloads_to_save.append(payload_item)
                        else:
                            payload_item.save()
                    # First match wins; move on to the next stored profile.
                    break
        if use_bulk:
            Payload.objects.bulk_create(payloads_to_save)

        return HttpResponse("Profiles submitted for %s.\n" % submission.get('serial'))
    return HttpResponse("No profiles submitted.\n")
def submit_profiles(request):
    """Accept a POSTed profiles report and rebuild a machine's profile data.

    Expects ``serial`` plus either ``base64bz2profiles`` (bz2-compressed,
    base64-encoded plist) or ``base64profiles`` (base64-encoded plist) in
    the POST body. Existing Profile rows for the machine are deleted and
    recreated, matching Payload rows are created, and profile plugin
    processing is kicked off.

    Returns 404 for an unknown serial; otherwise a plain-text response.
    """
    submission = request.POST
    serial = submission.get('serial').upper()
    machine = None
    if serial:
        try:
            machine = Machine.objects.get(serial=serial)
        except Machine.DoesNotExist:
            return HttpResponseNotFound('Serial Number not found')

    compression_type = 'base64bz2'
    compressed_profiles = None
    if 'base64bz2profiles' in submission:
        compressed_profiles = submission.get('base64bz2profiles')
    elif 'base64profiles' in submission:
        # BUGFIX: previously fetched 'base64bz2profiles' here, which is
        # absent in this branch, so plain-base64 submissions were dropped.
        compressed_profiles = submission.get('base64profiles')
        compression_type = 'base64'

    if compressed_profiles:
        # Form encoding turns '+' into spaces; undo that before decoding.
        compressed_profiles = compressed_profiles.replace(" ", "+")
        profiles_str = text_utils.decode_to_string(compressed_profiles, compression_type)
        try:
            profiles_list = plistlib.readPlistFromString(profiles_str)
        except Exception:
            # NOTE(review): on parse failure profiles_list is None, and the
            # membership test below will raise TypeError — confirm whether a
            # 4xx response is preferable here.
            profiles_list = None

        profiles_to_be_added = []
        # Full replace: drop every stored profile before re-adding.
        machine.profile_set.all().delete()
        if '_computerlevel' in profiles_list:
            profiles_list = profiles_list['_computerlevel']
        # Hoisted out of the loops; the DB backend cannot change mid-request.
        use_bulk = utils.is_postgres()
        for profile in profiles_list:
            parsed_date = dateutil.parser.parse(profile.get('ProfileInstallDate'))
            profile_item = Profile(
                machine=machine,
                identifier=profile.get('ProfileIdentifier', ''),
                display_name=profile.get('ProfileDisplayName', ''),
                description=profile.get('ProfileDescription', ''),
                organization=profile.get('ProfileOrganization', ''),
                uuid=profile.get('ProfileUUID', ''),
                verification_state=profile.get('ProfileVerificationState', ''),
                install_date=parsed_date)
            if use_bulk:
                profiles_to_be_added.append(profile_item)
            else:
                profile_item.save()
        if use_bulk:
            Profile.objects.bulk_create(profiles_to_be_added)

        # Re-read the stored profiles and attach payloads to each one by
        # matching (uuid, identifier) back to the submitted plist entries.
        stored_profiles = machine.profile_set.all()
        payloads_to_save = []
        for stored_profile in stored_profiles:
            uuid = stored_profile.uuid
            identifier = stored_profile.identifier
            for profile in profiles_list:
                profile_uuid = profile.get('ProfileUUID', '')
                profile_id = profile.get('ProfileIdentifier', '')
                if uuid == profile_uuid and identifier == profile_id:
                    payloads = profile.get('ProfileItems', [])
                    for payload in payloads:
                        payload_item = Payload(
                            profile=stored_profile,
                            identifier=payload.get('PayloadIdentifier', ''),
                            uuid=payload.get('PayloadUUID', ''),
                            payload_type=payload.get('PayloadType', ''))
                        if use_bulk:
                            payloads_to_save.append(payload_item)
                        else:
                            payload_item.save()
                    # First match wins; move on to the next stored profile.
                    break
        if use_bulk:
            Payload.objects.bulk_create(payloads_to_save)

        utils.run_profiles_plugin_processing(machine, profiles_list)
        return HttpResponse("Profiles submitted for %s.\n" % submission.get('serial'))
    return HttpResponse("No profiles submitted.\n")
def inventory_submit(request):
    """Accept a POSTed software inventory for a machine and store it.

    Expects ``serial`` plus either ``base64bz2inventory`` (bz2-compressed,
    base64-encoded plist) or ``base64inventory`` (base64-encoded plist) in
    the POST body. Replaces the machine's InventoryItem rows with the
    submitted list (skipping ignored bundle IDs), updates the Inventory
    metadata row, and stamps the machine's last inventory time.

    Returns 404 for an unknown serial; otherwise a plain-text response.
    """
    # list of bundleids to ignore
    bundleid_ignorelist = ['com.apple.print.PrinterProxy']
    submission = request.POST
    serial = submission.get('serial').upper()
    machine = None
    if serial:
        try:
            machine = Machine.objects.get(serial=serial)
        except Machine.DoesNotExist:
            return HttpResponseNotFound('Serial Number not found')

    compression_type = 'base64bz2'
    # BUGFIX: initialize so a POST carrying neither inventory key does not
    # raise NameError at the `if compressed_inventory:` check below.
    compressed_inventory = None
    if 'base64bz2inventory' in submission:
        compressed_inventory = submission.get('base64bz2inventory')
    elif 'base64inventory' in submission:
        compressed_inventory = submission.get('base64inventory')
        compression_type = 'base64'

    if compressed_inventory:
        # Form encoding turns '+' into spaces; undo that before decoding.
        compressed_inventory = compressed_inventory.replace(" ", "+")
        inventory_str = text_utils.decode_to_string(compressed_inventory, compression_type)
        try:
            inventory_list = plistlib.readPlistFromString(inventory_str)
        except Exception:
            inventory_list = None
        if inventory_list:
            try:
                inventory_meta = Inventory.objects.get(machine=machine)
            except Inventory.DoesNotExist:
                inventory_meta = Inventory(machine=machine)
            # NOTE(review): hashlib.sha256 requires bytes on Python 3 —
            # this assumes inventory_str is a bytestring; confirm under Py3.
            inventory_meta.sha256hash = \
                hashlib.sha256(inventory_str).hexdigest()
            inventory_meta.inventory_str = inventory_str

            # clear existing inventoryitems
            machine.inventoryitem_set.all().delete()

            # insert current inventory items
            inventory_items_to_be_created = []
            # Hoisted out of the loop; the DB backend cannot change mid-request.
            use_bulk = utils.is_postgres()
            for item in inventory_list:
                app, _ = Application.objects.get_or_create(
                    bundleid=item.get("bundleid", ""),
                    name=item.get("name", ""),
                    bundlename=item.get("CFBundleName", ""))
                # skip items in bundleid_ignorelist.
                if item.get('bundleid') not in bundleid_ignorelist:
                    i_item = InventoryItem(
                        application=app,
                        version=item.get("version", ""),
                        path=item.get('path', ''),
                        machine=machine)
                    if use_bulk:
                        inventory_items_to_be_created.append(i_item)
                    else:
                        i_item.save()

            machine.last_inventory_update = timezone.now()
            inventory_meta.save()
            # BUGFIX: persist the machine row; previously the
            # last_inventory_update timestamp set above was never saved.
            machine.save()
            if use_bulk:
                InventoryItem.objects.bulk_create(inventory_items_to_be_created)

        return HttpResponse("Inventory submitted for %s.\n" % submission.get('serial'))
    return HttpResponse("No inventory submitted.\n")