Example #1
def get_csv_row(machine, facter_headers, condition_headers, plugin_script_headers):
    row = []
    for name, value in machine.get_fields():
        if name not in IGNORED_CSV_FIELDS:
            try:
                row.append(text_utils.safe_unicode(value))
            except Exception:
                # Fall back to an empty cell if the value can't be coerced.
                row.append('')

    row.append(machine.machine_group.business_unit.name)
    row.append(machine.machine_group.name)
    return row
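
As a usage sketch (not from the project), rows built this way can be fed straight to Python's csv module; the helper name, file object, and machine iterable below are assumptions for illustration.

import csv

def write_machines_csv(fileobj, machines, facter_headers=None,
                       condition_headers=None, plugin_script_headers=None):
    # Hypothetical helper (names assumed): one CSV row per machine.
    # On Python 2, non-ASCII cells would need encoding before writing.
    writer = csv.writer(fileobj)
    for machine in machines:
        writer.writerow(get_csv_row(machine, facter_headers,
                                    condition_headers, plugin_script_headers))
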
Example #2
File: utils.py  Project: bdemetris/sal
def process_plugin_script(results, machine):
    rows_to_create = []

    results = get_newest_plugin_results(results)

    for plugin in results:
        plugin_name = plugin['plugin']
        historical = plugin.get('historical', False)
        if not historical:
            PluginScriptSubmission.objects.filter(
                machine=machine, plugin=safe_unicode(plugin_name)).delete()

        plugin_script = PluginScriptSubmission(
            machine=machine, plugin=safe_unicode(plugin_name), historical=historical)
        plugin_script.save()
        data = plugin.get('data')
        # Bail out if the plugin data is ill-formed (not a dict).
        if not isinstance(data, dict):
            return
        for key, value in data.items():
            plugin_row = PluginScriptRow(
                submission=plugin_script,  # FK field expects the model instance
                pluginscript_name=safe_unicode(key),
                pluginscript_data=safe_unicode(value),
                submission_and_script_name=(safe_unicode('{}: {}'.format(plugin_name, key))))
            if is_postgres():
                rows_to_create.append(plugin_row)
            else:
                plugin_row.save()

    if is_postgres() and rows_to_create:
        PluginScriptRow.objects.bulk_create(rows_to_create)
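
For orientation, here is a sketch of the payload shape this function expects, inferred from the keys it reads ('plugin', 'historical', 'data'); the plugin name and values are invented.

example_results = [
    {
        'plugin': 'Encryption',            # illustrative plugin name
        'historical': False,               # non-historical: prior submissions are purged
        'data': {'Filevault': 'Enabled'},  # each key/value becomes a PluginScriptRow
    },
]
# process_plugin_script(example_results, machine)  # `machine` is a saved Machine
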
Example #3
    def get_context(self, machines, group_type='all', group_id=None):
        context = self.super_get_context(machines, group_type=group_type, group_id=group_id)
        catalog_objects = Catalog.objects.all()
        if group_type == 'business_unit':
            business_unit = get_object_or_404(BusinessUnit, pk=group_id)
            catalog_objects = catalog_objects.filter(machine_group__business_unit=business_unit)
        elif group_type == 'machine_group':
            catalog_objects = catalog_objects.filter(machine_group__pk=group_id)

        description_dict = {}
        for catalog in catalog_objects:
            safe_data = plistlib.readPlistFromString(safe_unicode(catalog.content))
            for pkginfo in safe_data:
                description_dict[pkginfo['name'], pkginfo['version']] = pkginfo.get(
                    'description', '')

        output = []
        # Get the install reports for the machines we're looking for
        installed_updates = InstalledUpdate.objects.filter(machine__in=machines).values(
            'update', 'display_name', 'update_version').order_by().distinct()

        for installed_update in installed_updates:
            item = {}
            item['version'] = installed_update['update_version']
            item['name'] = installed_update['update']
            item['description'] = description_dict.get((item['name'], item['version']), '')
            item['install_count'] = InstalledUpdate.objects.filter(
                machine__in=machines,
                update=installed_update['update'],
                update_version=installed_update['update_version'],
                installed=True).count()

            item['pending_count'] = PendingUpdate.objects.filter(
                machine__in=machines,
                update=installed_update['update'],
                update_version=installed_update['update_version']).count()

            item['installed_url'] = 'Installed?VERSION=%s&&NAME=%s' % (
                item['version'], item['name'])
            item['pending_url'] = 'Pending?VERSION=%s&&NAME=%s' % (
                item['version'], item['name'])

            item = self.replace_dots(item)

            output.append(item)

        context['output'] = sorted(output, key=lambda k: (k['name'], k['version']))
        context['thename'] = 'Install Report'
        return context
Example #4
    def get_context(self, machines, group_type='all', group_id=None):
        context = self.super_get_context(machines, group_type=group_type, group_id=group_id)
        catalog_objects = Catalog.objects.all()
        if group_type == 'business_unit':
            business_unit = get_object_or_404(BusinessUnit, pk=group_id)
            catalog_objects = catalog_objects.filter(machine_group__business_unit=business_unit)
        elif group_type == 'machine_group':
            catalog_objects = catalog_objects.filter(machine_group__pk=group_id)

        description_dict = {}
        for catalog in catalog_objects:
            safe_data = plistlib.readPlistFromString(safe_unicode(catalog.content))
            for pkginfo in safe_data:
                description_dict[pkginfo['name'], pkginfo['version']] = pkginfo.get(
                    'description', '')

        output = []
        # Get the install reports for the machines we're looking for
        installed_updates = InstalledUpdate.objects.filter(machine__in=machines).values(
            'update', 'display_name', 'update_version').order_by().distinct()

        for installed_update in installed_updates:
            item = {}
            item['version'] = installed_update['update_version']
            item['name'] = installed_update['update']
            item['description'] = description_dict.get((item['name'], item['version']), '')

            update_queryset = InstalledUpdate.objects.filter(
                machine__in=machines,
                update=installed_update['update'],
                update_version=installed_update['update_version'])
            item['install_count'] = update_queryset.filter(installed=True).count()
            item['pending_count'] = update_queryset.filter(installed=False).count()

            item['installed_url'] = 'Installed?VERSION=%s&&NAME=%s' % (
                item['version'], item['name'])
            item['pending_url'] = 'Pending?VERSION=%s&&NAME=%s' % (
                item['version'], item['name'])

            item = self.replace_dots(item)

            output.append(item)

        context['output'] = sorted(output, key=lambda k: (k['name'], k['version']))
        context['thename'] = 'Install Report'
        return context
Example #5
def process_conditions(machine, report_data):
    conditions_to_delete = machine.conditions.all()
    if conditions_to_delete.exists():
        # _raw_delete issues a single DELETE without signal handling, for speed.
        conditions_to_delete._raw_delete(conditions_to_delete.db)
    conditions_to_be_created = []
    for condition_name, condition_data in report_data.get('Conditions', {}).items():
        # Skip the conditions that come from facter
        if 'Facter' in report_data and condition_name.startswith('facter_'):
            continue

        condition_data = text_utils.safe_unicode(text_utils.stringify(condition_data))
        condition = Condition(
            machine=machine, condition_name=condition_name, condition_data=condition_data)
        conditions_to_be_created.append(condition)

    if conditions_to_be_created:
        if IS_POSTGRES:
            Condition.objects.bulk_create(conditions_to_be_created)
        else:
            for condition in conditions_to_be_created:
                condition.save()
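
For orientation, a sketch of the report_data slice this function reads; the keys mirror the lookups above and the values are invented.

example_report_data = {
    'Facter': {'os_family': 'Darwin'},
    'Conditions': {
        'facter_os_family': 'Darwin',  # skipped: facter-derived condition
        'munki_version': '3.6.3',      # stored as a Condition row
    },
}
# process_conditions(machine, example_report_data)
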
Example #6
def checkin(request):
    if request.method != 'POST':
        print 'not post data'
        return HttpResponseNotFound('No POST data sent')

    data = request.POST
    key = data.get('key')
    uuid = data.get('uuid')
    serial = data.get('serial')
    serial = serial.upper()
    broken_client = data.get('broken_client', False)

    # Take out some of the weird junk VMware puts in. Keep an eye out in case
    # Apple actually uses these:
    serial = serial.replace('/', '')
    serial = serial.replace('+', '')

    # Are we using Sal for some sort of inventory (like, I don't know, Puppet?)
    try:
        add_new_machines = settings.ADD_NEW_MACHINES
    except Exception:
        add_new_machines = True

    if add_new_machines:
        # look for serial number - if it doesn't exist, create one
        if serial:
            try:
                machine = Machine.objects.get(serial=serial)
            except Machine.DoesNotExist:
                machine = Machine(serial=serial)
    else:
        machine = get_object_or_404(Machine, serial=serial)

    try:
        deployed_on_checkin = settings.DEPLOYED_ON_CHECKIN
    except Exception:
        deployed_on_checkin = True

    if key is None or key == 'None':
        try:
            key = settings.DEFAULT_MACHINE_GROUP_KEY
        except Exception:
            pass

    machine_group = get_object_or_404(MachineGroup, key=key)
    machine.machine_group = machine_group

    machine.last_checkin = django.utils.timezone.now()

    if bool(broken_client):
        machine.broken_client = True
        machine.save()
        return HttpResponse("Broken Client report submmitted for %s" %
                            data.get('serial'))
    else:
        machine.broken_client = False

    historical_days = utils.get_setting('historical_retention')

    machine.hostname = data.get('name', '<NO NAME>')

    if 'username' in data:
        if data.get('username') != '_mbsetupuser':
            machine.console_user = data.get('username')

    if 'base64bz2report' in data:
        machine.update_report(data.get('base64bz2report'))

    if 'base64report' in data:
        machine.update_report(data.get('base64report'), 'base64')

    if 'sal_version' in data:
        machine.sal_version = data.get('sal_version')

    # extract machine data from the report
    report_data = machine.get_report()
    if 'Puppet_Version' in report_data:
        machine.puppet_version = report_data['Puppet_Version']
    if 'ManifestName' in report_data:
        manifest = report_data['ManifestName']
        machine.manifest = manifest
    if 'MachineInfo' in report_data:
        machine.operating_system = report_data['MachineInfo'].get(
            'os_vers', 'UNKNOWN')
        # some machines are reporting 10.9, some 10.9.0 - make them the same
        if len(machine.operating_system) <= 4:
            machine.operating_system = machine.operating_system + '.0'

    # If gosal is the sender, look for the OSVers key.
    if 'OSVers' in report_data.get('MachineInfo', {}):
        machine.operating_system = report_data['MachineInfo'].get('OSVers')

    machine.hd_space = report_data.get('AvailableDiskSpace') or 0
    machine.hd_total = int(data.get('disk_size') or 0)

    if machine.hd_total == 0:
        machine.hd_percent = 0
    else:
        machine.hd_percent = int(
            round(((float(machine.hd_total) - float(machine.hd_space)) /
                   float(machine.hd_total)) * 100))
    machine.munki_version = report_data.get('ManagedInstallVersion') or 0
    hwinfo = {}
    # macOS System Profiler
    if 'SystemProfile' in report_data.get('MachineInfo', []):
        for profile in report_data['MachineInfo']['SystemProfile']:
            if profile['_dataType'] == 'SPHardwareDataType':
                hwinfo = profile['_items'][0]
                break
    if 'HardwareInfo' in report_data.get('MachineInfo', []):
        hwinfo = report_data['MachineInfo']['HardwareInfo']
    if 'Puppet' in report_data:
        puppet = report_data.get('Puppet')
        if 'time' in puppet:
            machine.last_puppet_run = datetime.fromtimestamp(
                float(puppet['time']['last_run']))
        if 'events' in puppet:
            machine.puppet_errors = puppet['events']['failure']

    if hwinfo:
        # setup vars for hash keys we might get sent
        if 'MachineModel' in hwinfo:
            var_machine_model = 'MachineModel'
            var_cpu_type = 'CPUType'
            var_cpu_speed = 'CurrentProcessorSpeed'
            var_memory = 'PhysicalMemory'
        else:
            var_machine_model = 'machine_model'
            var_cpu_type = 'cpu_type'
            var_cpu_speed = 'current_processor_speed'
            var_memory = 'physical_memory'

        machine.machine_model = hwinfo.get(var_machine_model)
        machine.cpu_type = hwinfo.get(var_cpu_type)
        machine.cpu_speed = hwinfo.get(var_cpu_speed)
        machine.memory = hwinfo.get(var_memory)

        # Normalize the human-readable memory string (e.g. "16 GB") to KB.
        memory_str = hwinfo.get(var_memory)
        if memory_str[-2:] == 'KB':
            machine.memory_kb = int(memory_str[:-3])
        if memory_str[-2:] == 'MB':
            memory_mb = float(memory_str[:-3])
            machine.memory_kb = int(memory_mb * 1024)
        if memory_str[-2:] == 'GB':
            memory_gb = float(memory_str[:-3])
            machine.memory_kb = int(memory_gb * 1024 * 1024)
        if memory_str[-2:] == 'TB':
            memory_tb = float(memory_str[:-3])
            machine.memory_kb = int(memory_tb * 1024 * 1024 * 1024)

    if 'os_family' in report_data:
        machine.os_family = report_data['os_family']

    # support golang strict structure
    if 'OSFamily' in report_data:
        machine.os_family = report_data['OSFamily']

    if not machine.machine_model_friendly:
        try:
            machine.machine_model_friendly = utils.friendly_machine_model(
                machine)
        except Exception:
            machine.machine_model_friendly = machine.machine_model

    if deployed_on_checkin is True:
        machine.deployed = True

    machine.save()

    # Prune plugin script submissions older than the retention window, then
    # process any Plugin_Results in the report.
    try:
        datelimit = django.utils.timezone.now() - timedelta(
            days=historical_days)
        PluginScriptSubmission.objects.filter(recorded__lt=datelimit).delete()
    except Exception:
        pass

    if 'Plugin_Results' in report_data:
        utils.process_plugin_script(report_data.get('Plugin_Results'), machine)

    # Remove existing PendingUpdates for the machine
    machine.pending_updates.all().delete()
    now = django.utils.timezone.now()
    if 'ItemsToInstall' in report_data:
        pending_update_to_save = []
        update_history_item_to_save = []
        for update in report_data.get('ItemsToInstall'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update['version_to_install'])
            if version:
                pending_update = PendingUpdate(machine=machine,
                                               display_name=display_name,
                                               update_version=version,
                                               update=update_name)
                if IS_POSTGRES:
                    pending_update_to_save.append(pending_update)
                else:
                    pending_update.save()
                # Let's record some of those lovely pending installs in the UpdateHistory model.
                try:
                    update_history = UpdateHistory.objects.get(
                        name=update_name,
                        version=version,
                        machine=machine,
                        update_type='third_party')
                except UpdateHistory.DoesNotExist:
                    update_history = UpdateHistory(name=update_name,
                                                   version=version,
                                                   machine=machine,
                                                   update_type='third_party')
                    update_history.save()

                if not update_history.pending_recorded:
                    update_history_item = UpdateHistoryItem(
                        update_history=update_history,
                        status='pending',
                        recorded=now,
                        uuid=uuid)

                    update_history.pending_recorded = True
                    update_history.save()

                    if IS_POSTGRES:
                        update_history_item_to_save.append(update_history_item)
                    else:
                        update_history_item.save()

        if IS_POSTGRES:
            PendingUpdate.objects.bulk_create(pending_update_to_save)
            UpdateHistoryItem.objects.bulk_create(update_history_item_to_save)

    machine.installed_updates.all().delete()

    if 'ManagedInstalls' in report_data:
        # Due to a quirk in how Munki 3 processes updates with dependencies,
        # it's possible to have multiple entries in the ManagedInstalls list
        # that share an update_name and installed_version. This causes an
        # IntegrityError in Django since (machine_id, update, update_version)
        # must be unique. Until (unless!) this is addressed in Munki, we need to
        # be careful to not add multiple items with the same name and version.
        # We'll store each (update_name, version) combo as we see them.
        seen_names_and_versions = []
        installed_updates_to_save = []
        for update in report_data.get('ManagedInstalls'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update.get('installed_version', 'UNKNOWN'))
            installed = update.get('installed')
            if (update_name, version) not in seen_names_and_versions:
                seen_names_and_versions.append((update_name, version))
                if (version != 'UNKNOWN' and version is not None
                        and len(version) != 0):
                    installed_update = InstalledUpdate(
                        machine=machine,
                        display_name=display_name,
                        update_version=version,
                        update=update_name,
                        installed=installed)
                    if IS_POSTGRES:
                        installed_updates_to_save.append(installed_update)
                    else:
                        installed_update.save()
        if IS_POSTGRES:
            InstalledUpdate.objects.bulk_create(installed_updates_to_save)

    # Remove existing PendingAppleUpdates for the machine
    machine.pending_apple_updates.all().delete()
    if 'AppleUpdates' in report_data:
        for update in report_data.get('AppleUpdates'):
            display_name = update.get('display_name', update['name'])
            update_name = update.get('name')
            version = str(update['version_to_install'])
            try:
                pending_update = PendingAppleUpdate.objects.get(
                    machine=machine,
                    display_name=display_name,
                    update_version=version,
                    update=update_name)
            except PendingAppleUpdate.DoesNotExist:
                pending_update = PendingAppleUpdate(machine=machine,
                                                    display_name=display_name,
                                                    update_version=version,
                                                    update=update_name)
                pending_update.save()
            # Let's record some of those lovely pending installs in the UpdateHistory model.
            try:
                update_history = UpdateHistory.objects.get(name=update_name,
                                                           version=version,
                                                           machine=machine,
                                                           update_type='apple')
            except UpdateHistory.DoesNotExist:
                update_history = UpdateHistory(name=update_name,
                                               version=version,
                                               machine=machine,
                                               update_type='apple')
                update_history.save()

            if not update_history.pending_recorded:
                update_history_item = UpdateHistoryItem(
                    update_history=update_history,
                    status='pending',
                    recorded=now,
                    uuid=uuid)
                update_history_item.save()
                update_history.pending_recorded = True
                update_history.save()

    # if Facter data is submitted, we need to first remove any existing facts for this machine
    if IS_POSTGRES:
        # If we are using postgres, we can just dump them all and do a bulk create
        if 'Facter' in report_data:
            machine.facts.all().delete()
            try:
                datelimit = django.utils.timezone.now() - timedelta(
                    days=historical_days)
                HistoricalFact.objects.filter(
                    fact_recorded__lt=datelimit).delete()
            except Exception:
                pass
            try:
                historical_facts = settings.HISTORICAL_FACTS
            except Exception:
                historical_facts = []

            facts_to_be_created = []
            historical_facts_to_be_created = []
            for fact_name, fact_data in report_data['Facter'].iteritems():
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:
                        if fact_name.startswith(prefix):
                            skip = True
                if skip:
                    continue
                facts_to_be_created.append(
                    Fact(machine=machine,
                         fact_data=fact_data,
                         fact_name=fact_name))
                if fact_name in historical_facts:
                    historical_facts_to_be_created.append(
                        HistoricalFact(machine=machine,
                                       fact_data=fact_data,
                                       fact_name=fact_name))
            Fact.objects.bulk_create(facts_to_be_created)
            if len(historical_facts_to_be_created) != 0:
                HistoricalFact.objects.bulk_create(
                    historical_facts_to_be_created)

    else:
        if 'Facter' in report_data:
            facts = machine.facts.all()
            for fact in facts:
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:

                        if fact.fact_name.startswith(prefix):
                            skip = True
                            fact.delete()
                            break
                if skip:
                    # The ignored fact was already deleted above; move on.
                    continue
                found = False
                for fact_name, fact_data in report_data['Facter'].iteritems():

                    if fact.fact_name == fact_name:
                        found = True
                        break
                if not found:
                    fact.delete()

            # Delete old historical facts

            try:
                datelimit = django.utils.timezone.now() - timedelta(
                    days=historical_days)
                HistoricalFact.objects.filter(
                    fact_recorded__lt=datelimit).delete()
            except Exception:
                pass
            try:
                historical_facts = settings.HISTORICAL_FACTS
            except Exception:
                historical_facts = []
            # now we need to loop over the submitted facts and save them
            facts = machine.facts.all()
            for fact_name, fact_data in report_data['Facter'].iteritems():
                if machine.os_family == 'Windows':
                    # We had a little trouble parsing out facts on Windows, clean up here
                    if fact_name.startswith('value=>'):
                        fact_name = fact_name.replace('value=>', '', 1)

                # does fact exist already?
                found = False
                skip = False
                if hasattr(settings, 'IGNORE_FACTS'):
                    for prefix in settings.IGNORE_FACTS:

                        if fact_name.startswith(prefix):
                            skip = True
                            break
                if skip:
                    continue
                for fact in facts:
                    if fact_name == fact.fact_name:
                        # it exists, make sure it's got the right info
                        found = True
                        if fact_data == fact.fact_data:
                            # it's right, break
                            break
                        else:
                            fact.fact_data = fact_data
                            fact.save()
                            break
                if not found:

                    fact = Fact(machine=machine,
                                fact_data=fact_data,
                                fact_name=fact_name)
                    fact.save()

                if fact_name in historical_facts:
                    fact = HistoricalFact(machine=machine,
                                          fact_name=fact_name,
                                          fact_data=fact_data,
                                          fact_recorded=datetime.now())
                    fact.save()

    if IS_POSTGRES:
        if 'Conditions' in report_data:
            machine.conditions.all().delete()
            conditions_to_be_created = []
            for condition_name, condition_data in report_data[
                    'Conditions'].iteritems():
                # Skip the conditions that come from facter
                if 'Facter' in report_data and condition_name.startswith(
                        'facter_'):
                    continue

                condition_data = text_utils.stringify(condition_data)
                conditions_to_be_created.append(
                    Condition(machine=machine,
                              condition_name=condition_name,
                              condition_data=text_utils.safe_unicode(
                                  condition_data)))

            Condition.objects.bulk_create(conditions_to_be_created)
    else:
        if 'Conditions' in report_data:
            conditions = machine.conditions.all()
            for condition in conditions:
                found = False
                for condition_name, condition_data in report_data[
                        'Conditions'].iteritems():
                    if condition.condition_name == condition_name:
                        found = True
                        break
                if found is False:
                    condition.delete()

            conditions = machine.conditions.all()
            for condition_name, condition_data in report_data[
                    'Conditions'].iteritems():
                # Skip the conditions that come from facter
                if 'Facter' in report_data and condition_name.startswith(
                        'facter_'):
                    continue

                # If it's a list (more than one result), we're going to
                # concatenate it into one comma-separated string.
                condition_data = text_utils.stringify(condition_data)

                found = False
                for condition in conditions:
                    if condition_name == condition.condition_name:
                        # it exists, make sure it's got the right info
                        found = True
                        if condition_data == condition.condition_data:
                            # it's right, break
                            break
                        else:
                            condition.condition_data = condition_data
                            condition.save()
                            break
                if found is False:
                    condition = Condition(
                        machine=machine,
                        condition_name=condition_name,
                        condition_data=text_utils.safe_unicode(condition_data))
                    condition.save()

    utils.run_plugin_processing(machine, report_data)

    if utils.get_setting('send_data') in (None, True):
        # If setting is None, it hasn't been configured yet; assume True
        current_version = utils.send_report()
    else:
        current_version = utils.get_current_release_version_number()
    if current_version:
        utils.set_setting('current_version', current_version)

    return HttpResponse("Sal report submmitted for %s" % data.get('name'))