Example #1
0
    def __init__(self, cmd, environment_vars=None):
        """Create, write and load a launchd job that runs cmd.

        Args:
            cmd: list of strings -- the ProgramArguments for launchd.
            environment_vars: optional dict of environment variables
                to set for the job.

        Raises:
            LaunchdJobException: if launchctl fails to load the job.
        """
        tmpdir = munkicommon.tmpdir()
        labelprefix = 'com.googlecode.munki.'
        # create a unique id for this job
        jobid = str(uuid.uuid1())

        self.label = labelprefix + jobid
        # the job's stdout/stderr are captured in temp files
        self.stdout_path = os.path.join(tmpdir, self.label + '.stdout')
        self.stderr_path = os.path.join(tmpdir, self.label + '.stderr')
        self.plist_path = os.path.join(tmpdir, self.label + '.plist')
        self.stdout = None
        self.stderr = None
        self.plist = {}
        self.plist['Label'] = self.label
        self.plist['ProgramArguments'] = cmd
        self.plist['StandardOutPath'] = self.stdout_path
        self.plist['StandardErrorPath'] = self.stderr_path
        if environment_vars:
            self.plist['EnvironmentVariables'] = environment_vars
        # write out launchd plist
        FoundationPlist.writePlist(self.plist, self.plist_path)
        # set owner, group and mode to those required
        # by launchd
        os.chown(self.plist_path, 0, 0)
        # idiom fix: use an octal literal instead of int('644', 8)
        os.chmod(self.plist_path, 0o644)
        launchctl_cmd = ['/bin/launchctl', 'load', self.plist_path]
        proc = subprocess.Popen(launchctl_cmd,
                                shell=False,
                                bufsize=-1,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        err = proc.communicate()[1]
        if proc.returncode:
            raise LaunchdJobException(err)
Example #2
0
def buildplist(nbiindex,
               nbidescription,
               nbiname,
               nbienabled,
               destdir=__file__):
    """Write a valid NBImageInfo.plist for imagetool ingestion into the
    <nbiname>.nbi bundle located under destdir."""

    nbipath = os.path.join(destdir, nbiname + '.nbi')
    support_plist = os.path.join(nbipath, 'i386', 'PlatformSupport.plist')
    platformsupport = FoundationPlist.readPlist(support_plist)
    enabledsystems = platformsupport.get('SupportedModelProperties')

    # Assemble the NBImageInfo payload key by key.
    nbimageinfo = {}
    nbimageinfo['IsInstall'] = True
    nbimageinfo['Index'] = nbiindex
    nbimageinfo['Kind'] = 1
    nbimageinfo['Description'] = nbidescription
    nbimageinfo['Language'] = 'Default'
    nbimageinfo['IsEnabled'] = nbienabled
    nbimageinfo['SupportsDiskless'] = False
    nbimageinfo['RootPath'] = 'NetInstall.dmg'
    nbimageinfo['EnabledSystemIdentifiers'] = enabledsystems
    nbimageinfo['BootFile'] = 'booter'
    nbimageinfo['Architectures'] = ['i386']
    nbimageinfo['BackwardCompatible'] = False
    nbimageinfo['DisabledSystemIdentifiers'] = []
    nbimageinfo['Type'] = 'NFS'
    nbimageinfo['IsDefault'] = False
    nbimageinfo['Name'] = nbiname
    nbimageinfo['osVersion'] = '10.9'

    FoundationPlist.writePlist(
        nbimageinfo, os.path.join(nbipath, 'NBImageInfo.plist'))
Example #3
0
def escrow_key(key, username, runtype):
    theurl = pref('ServerURL')+"/checkin/"
    serial = GetMacSerial()
    macname = GetMacName()
    mydata=[('serial',serial),('recovery_password',key),('username',username),('macname',macname)]
    mydata=urllib.urlencode(mydata)
    req = Request(theurl, mydata)
    try:
        response = urlopen(req)
    except URLError, e:
        if hasattr(e, 'reason'):
            print 'We failed to reach a server.'
            print 'Reason: ', e.reason
            has_error = True
        elif hasattr(e, 'code'):
            print 'The server couldn\'t fulfill the request'
            print 'Error code: ', e.code
            has_error = True
        if has_error:
            plistData = {}
            plistData['recovery_key']=key
            plistData['username']=username
            try:
                FoundationPlist.writePlist(plistData, '/private/var/root/recovery_key.plist')
            except:
                os.makedirs('/usr/local/crypt')
                FoundationPlist.writePlist(plistData, '/private/var/root/recovery_key.plist')

            os.chmod('/private/var/root/recovery_key.plist',0700)
            if runtype=="initial":
                the_command = "/sbin/reboot"
                reboot = subprocess.Popen(the_command,shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE).communicate()[0]
Example #4
0
def main():
    """Collect the ARD Text1-Text4 info fields and append them to the
    Sal plugin results plist."""
    ard_path = "/Library/Preferences/com.apple.RemoteDesktop.plist"
    if os.path.exists(ard_path):
        ard_prefs = FoundationPlist.readPlist(ard_path)
    else:
        ard_prefs = {}

    # Map each ARD TextN preference onto the matching ARD_Info_N key,
    # defaulting to an empty string when the preference is absent.
    data = {}
    for field in xrange(1, 5):
        data["ARD_Info_{}".format(field)] = ard_prefs.get(
            "Text{}".format(field), "")

    formatted_results = {
        "plugin": "ARD_Info",
        "historical": False,
        "data": data,
    }

    if os.path.exists(RESULTS_PATH):
        plugin_results = FoundationPlist.readPlist(RESULTS_PATH)
    else:
        plugin_results = []
    plugin_results.append(formatted_results)
    FoundationPlist.writePlist(plugin_results, RESULTS_PATH)
Example #5
0
    def __init__(self, cmd, environment_vars=None):
        """Create, write and load a launchd job that runs cmd.

        cmd: list of program arguments for launchd to execute.
        environment_vars: optional dict of environment variables for
            the job.
        Raises LaunchdJobException if launchctl fails to load the job.
        """
        # NOTE(review): tmpdir is not called here -- if munkicommon.tmpdir
        # is a function rather than a plain attribute, os.path.join below
        # would receive the function object. Confirm against munkicommon.
        tmpdir = munkicommon.tmpdir
        LABELPREFIX = 'com.googlecode.munki.'
        # create a unique id for this job
        jobid = str(uuid.uuid1())

        self.label = LABELPREFIX + jobid
        # the job's stdout/stderr are captured in temp files
        self.stdout_path = os.path.join(tmpdir, self.label + '.stdout')
        self.stderr_path = os.path.join(tmpdir, self.label + '.stderr')
        self.plist_path = os.path.join(tmpdir, self.label + '.plist')
        self.stdout = None
        self.stderr = None
        self.plist = {}
        self.plist['Label'] = self.label
        self.plist['ProgramArguments'] = cmd
        self.plist['StandardOutPath'] = self.stdout_path
        self.plist['StandardErrorPath'] = self.stderr_path
        if environment_vars:
            self.plist['EnvironmentVariables'] = environment_vars
        # write out launchd plist
        FoundationPlist.writePlist(self.plist, self.plist_path)
        # set owner, group and mode to those required
        # by launchd
        os.chown(self.plist_path, 0, 0)
        os.chmod(self.plist_path, int('644', 8))
        launchctl_cmd = ['/bin/launchctl', 'load', self.plist_path]
        proc = subprocess.Popen(launchctl_cmd, shell=False, bufsize=1,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        (unused_out, err) = proc.communicate()
        if proc.returncode:
            raise LaunchdJobException(err)
Example #6
0
    def copy_pkginfo_to_repo(self, pkginfo):
        """Saves pkginfo to munki_repo_path/pkgsinfo/subdirectory.
        Returns full path to the pkginfo in the repo.

        Raises ProcessorError if the destination directory cannot be
        created or the pkginfo cannot be written."""
        # less error checking because we copy the installer_item
        # first and bail if it fails...
        repo_path = self.env["MUNKI_REPO"]
        subdirectory = self.env.get("repo_subdirectory", "")
        destination_path = os.path.join(repo_path, "pkgsinfo", subdirectory)
        if not os.path.exists(destination_path):
            try:
                os.makedirs(destination_path)
            except OSError as err:
                raise ProcessorError("Could not create %s: %s" %
                                     (destination_path, err.strerror))

        extension = self.env.get("MUNKI_PKGINFO_FILE_EXTENSION", "plist")
        if len(extension) > 0:
            extension = '.' + extension.strip(".")
        # BUG FIX: use the stripped version string consistently; the
        # collision-rename loop below previously used the unstripped
        # version, so renamed files could differ from the base name.
        version = pkginfo["version"].strip()
        pkginfo_name = "%s-%s%s" % (pkginfo["name"], version, extension)
        pkginfo_path = os.path.join(destination_path, pkginfo_name)
        index = 0
        # Avoid clobbering an existing pkginfo by appending __N.
        while os.path.exists(pkginfo_path):
            index += 1
            pkginfo_name = "%s-%s__%s%s" % (
                pkginfo["name"], version, index, extension)
            pkginfo_path = os.path.join(destination_path, pkginfo_name)

        try:
            FoundationPlist.writePlist(pkginfo, pkginfo_path)
        except OSError as err:
            raise ProcessorError("Could not write pkginfo %s: %s" %
                                 (pkginfo_path, err.strerror))
        return pkginfo_path
Example #7
0
 def write_plist(self, data, pathname):
     """Serialize data as a plist at pathname.

     Any failure is re-raised as ProcessorError.
     """
     # pylint: disable=no-self-use
     try:
         FoundationPlist.writePlist(data, pathname)
     except Exception as err:
         message = "Could not write %s: %s" % (pathname, err)
         raise ProcessorError(message)
Example #8
0
def buildplist(nbiindex, nbidescription, nbiname, nbienabled, destdir=__file__):
    """Write NBImageInfo.plist for imagetool ingestion into the
    <nbiname>.nbi bundle under destdir."""
    nbipath = os.path.join(destdir, nbiname + '.nbi')
    support = FoundationPlist.readPlist(
        os.path.join(nbipath, 'i386', 'PlatformSupport.plist'))
    enabledsystems = support.get('SupportedModelProperties')

    nbimageinfo = {
        'IsInstall': True,
        'Index': nbiindex,
        'Kind': 1,
        'Description': nbidescription,
        'Language': 'Default',
        'IsEnabled': nbienabled,
        'SupportsDiskless': False,
        'RootPath': 'NetInstall.dmg',
        'EnabledSystemIdentifiers': enabledsystems,
        'BootFile': 'booter',
        'Architectures': ['i386'],
        'BackwardCompatible': False,
        'DisabledSystemIdentifiers': [],
        'Type': 'NFS',
        'IsDefault': False,
        'Name': nbiname,
        'osVersion': '10.9',
    }

    FoundationPlist.writePlist(
        nbimageinfo, os.path.join(nbipath, 'NBImageInfo.plist'))
Example #9
0
def main():
    """Collect battery info and append it to the Sal plugin results."""
    data = raw_battery_dict()
    # If this is not a laptop, data will just be empty. No need to do
    # more work.
    if data:
        adjusted_dict = adjusted_battery_dict()
        if adjusted_dict and "BatteryHealth" in adjusted_dict:
            data["BatteryHealth"] = adjusted_dict["BatteryHealth"]
        else:
            # BUG FIX: corrected "Unkonwn" typo in the reported value.
            data["BatteryHealth"] = "Unknown"

    formatted_results = {
        "plugin": "Battery",
        "historical": False,
        "data": data
    }

    if os.path.exists(RESULTS_PATH):
        plugin_results = FoundationPlist.readPlist(RESULTS_PATH)
    else:
        plugin_results = []

    plugin_results.append(formatted_results)

    FoundationPlist.writePlist(plugin_results, RESULTS_PATH)
 def write_plist(self, data, pathname):
     """Write a plist to pathname, wrapping any failure in ProcessorError."""
     # pylint: disable=no-self-use
     try:
         FoundationPlist.writePlist(data, pathname)
     except Exception as err:
         detail = 'Could not write %s: %s' % (pathname, err)
         raise ProcessorError(detail)
Example #11
0
def escrowKey(key, username, runtype):
    ##submit this to the server fv_status['recovery_password']
    theurl = pref('ServerURL')+"/checkin/"
    serial = GetMacSerial()
    macname = GetMacName()
    mydata=[('serial',serial),('recovery_password',key),('username',username),('macname',macname)]
    mydata=urllib.urlencode(mydata)
    req = Request(theurl, mydata)
    try:
        response = urlopen(req)
    except URLError, e:
        if hasattr(e, 'reason'):
            print 'We failed to reach a server.'
            print 'Reason: ', e.reason
            has_error = True
        #NSApp.terminate_(self)
        elif hasattr(e, 'code'):
            print 'The server couldn\'t fulfill the request'
            print 'Error code: ', e.code
            has_error = True
            #NSApp.terminate_(self)
            if has_error:
                plistData = {}
                plistData['recovery_key']=key
                plistData['username']=username
                
                FoundationPlist.writePlist(plistData, '/usr/local/crypt/recovery_key.plist')
                os.chmod('/usr/local/crypt/recovery_key.plist',0700)
                if runtype=="initial":
                    the_command = "/sbin/reboot"
            reboot = subprocess.Popen(the_command,shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE).communicate()[0]
Example #12
0
 def main(self):
     """Run makepkginfo against self.env["pkg_path"] and publish the
     resulting pkginfo dict as self.env["munki_info"] (and optionally
     write it to self.env["info_path"]).

     Raises ProcessorError if makepkginfo cannot be executed or exits
     non-zero.
     """
     # Wrap in a try/finally so the temp_path is always removed.
     temp_path = None
     try:
         # Check munki version.
         if os.path.exists("/usr/local/munki/munkilib/version.plist"):
             # Assume 0.7.0 or higher.
             munkiopts = ("displayname", "description", "catalog")
         else:
             # Assume 0.6.0
             munkiopts = ("catalog",)
         
         # Copy pkg to a temporary local directory, as installer -query
         # (which is called by makepkginfo) doesn't work on network drives.
         if self.env["pkg_path"].endswith("pkg"):
             # Create temporary directory.
             temp_path = tempfile.mkdtemp(prefix="autopkg", dir="/private/tmp")
             
             # Copy the pkg there
             pkg_for_makepkginfo = os.path.join(temp_path, os.path.basename(self.env["pkg_path"]))
             shutil.copyfile(self.env["pkg_path"], pkg_for_makepkginfo)
         else:
             pkg_for_makepkginfo = self.env["pkg_path"]
         
         # Generate arguments for makepkginfo.
         args = ["/usr/local/munki/makepkginfo"]
         for option in munkiopts:
             if option in self.env:
                 args.append("--%s=%s" % (option, self.env[option]))
         args.append(pkg_for_makepkginfo)
         
         # Call makepkginfo.
         try:
             p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             (out, err) = p.communicate()
         except OSError as e:
             raise ProcessorError("makepkginfo execution failed with error code %d: %s" % (
                                   e.errno, e.strerror))
         if p.returncode != 0:
             raise ProcessorError("creating pkginfo for %s failed: %s" % (self.env['pkg_path'], err))
         
     # makepkginfo cleanup.
     finally:
         if temp_path is not None:
             shutil.rmtree(temp_path)
     
     # Read output plist.
     # NOTE(review): 'out' is bound only if communicate() was reached;
     # any earlier exception propagates out of main() before this line,
     # so the apparent use-before-assignment cannot actually trigger.
     output = FoundationPlist.readPlistFromString(out)
     
     # Set version and name.
     if "version" in self.env:
         output["version"] = self.env["version"]
     if "name" in self.env:
         output["name"] = self.env["name"]
     
     # Save info.
     self.env["munki_info"] = output
     if "info_path" in self.env:
         FoundationPlist.writePlist(output, self.env["info_path"])
Example #13
0
File: munki.py Project: oliof/munki
def writeSelfServiceManifest(optional_install_choices):
    '''Persist the self-serve manifest so managedsoftwareupdate
    can pick it up.'''
    try:
        FoundationPlist.writePlist(
            optional_install_choices, WRITEABLE_SELF_SERVICE_MANIFEST_PATH)
    except FoundationPlist.FoundationPlistException:
        # best effort: a failed write is deliberately ignored
        pass
Example #14
0
def writeSelfServiceManifest(optional_install_choices):
    '''Persist the self-serve manifest so managedsoftwareupdate
    can pick it up; write failures are deliberately ignored.'''
    destination = "/Users/Shared/.SelfServeManifest"
    try:
        FoundationPlist.writePlist(optional_install_choices, destination)
    except FoundationPlist.FoundationPlistException:
        pass
Example #15
0
def writeSelfServiceManifest(optional_install_choices):
    '''Best-effort write of the self-serve manifest used by
    managedsoftwareupdate.'''
    manifest_path = WRITEABLE_SELF_SERVICE_MANIFEST_PATH
    try:
        FoundationPlist.writePlist(optional_install_choices, manifest_path)
    except FoundationPlist.FoundationPlistException:
        # failure to write is intentionally swallowed
        pass
Example #16
0
def autoset_timezone():
    """Enable automatic timezone selection based on current location."""
    das_plist = '/Library/Preferences/com.apple.timezone.auto.plist'
    enabler = FoundationPlist.readPlist(das_plist)
    # Only rewrite the plist when the setting actually changes;
    # previously it was written back unconditionally on every run.
    if enabler.get('Active') != 1:
        enabler['Active'] = 1
        FoundationPlist.writePlist(enabler, das_plist)
Example #17
0
def writeSelfServiceManifest(optional_install_choices):
    '''Write the self-serve manifest for managedsoftwareupdate.

    Failures are silently ignored (best effort).'''
    try:
        FoundationPlist.writePlist(
            optional_install_choices, "/Users/Shared/.SelfServeManifest")
    except FoundationPlist.FoundationPlistException:
        pass
Example #18
0
def cacheSwupdMetadata():
    '''Copies ServerMetadata (.smd), Metadata (.pkm),
    and Distribution (.dist) files for the available updates
    to the local machine and writes a new sucatalog that refers
    to the local copies of these files.

    Writes two catalogs: local_download.sucatalog (package URLs left
    pointing at Apple) and local_install.sucatalog (package URLs
    rewritten to local copies). Raises ReplicationError if the local
    catalog directory cannot be created.'''
    filtered_catalogpath = os.path.join(
        swupdCacheDir(), 'content/catalogs/filtered_index.sucatalog')
    catalog = FoundationPlist.readPlist(filtered_catalogpath)
    if 'Products' in catalog:
        product_keys = list(catalog['Products'].keys())
        for product_key in product_keys:
            munkicommon.display_status('Caching metadata for product ID %s',
                                       product_key)
            product = catalog['Products'][product_key]
            if 'ServerMetadataURL' in product:
                unused_path = replicateURLtoFilesystem(
                    product['ServerMetadataURL'], copy_only_if_missing=True)

            for package in product.get('Packages', []):
                ### not replicating the packages themselves ###
                #if 'URL' in package:
                #    unused_path = replicateURLtoFilesystem(
                #        package['URL'],
                #        copy_only_if_missing=fast_scan)
                if 'MetadataURL' in package:
                    munkicommon.display_status(
                        'Caching package metadata for product ID %s',
                        product_key)
                    unused_path = replicateURLtoFilesystem(
                        package['MetadataURL'], copy_only_if_missing=True)

            # NOTE(review): product['Distributions'] is accessed without a
            # guard -- a product with no Distributions key would raise
            # KeyError here; confirm the catalog schema guarantees it.
            distributions = product['Distributions']
            for dist_lang in distributions.keys():
                munkicommon.display_status(
                    'Caching %s distribution for product ID %s', dist_lang,
                    product_key)
                dist_url = distributions[dist_lang]
                unused_path = replicateURLtoFilesystem(
                    dist_url, copy_only_if_missing=True)

        # rewrite URLs to point to local resources
        rewriteURLs(catalog, rewrite_pkg_urls=False)
        # write out the rewritten catalog
        localcatalogpath = os.path.join(swupdCacheDir(), 'content', 'catalogs')
        if not os.path.exists(localcatalogpath):
            try:
                os.makedirs(localcatalogpath)
            except OSError, oserr:
                raise ReplicationError(oserr)
        localcatalogpathname = os.path.join(localcatalogpath,
                                            'local_download.sucatalog')
        FoundationPlist.writePlist(catalog, localcatalogpathname)

        # second pass: also point package URLs at local copies
        rewriteURLs(catalog, rewrite_pkg_urls=True)
        localcatalogpathname = os.path.join(localcatalogpath,
                                            'local_install.sucatalog')
        FoundationPlist.writePlist(catalog, localcatalogpathname)
Example #19
0
    def write_output_plist(self):
        """Serialize self.env to self.outfile as a plist.

        A missing environment is a no-op; any write failure is
        re-raised as ProcessorError.
        """
        if self.env is None:
            return
        try:
            FoundationPlist.writePlist(self.env, self.outfile)
        except BaseException as err:
            raise ProcessorError(err)
Example #20
0
def writeSelfServiceManifest(optional_install_choices):
    '''Write out our self-serve manifest
    so managedsoftwareupdate can use it. Returns True on success,
    False otherwise.'''
    try:
        FoundationPlist.writePlist(
            optional_install_choices, WRITEABLE_SELF_SERVICE_MANIFEST_PATH)
    except FoundationPlist.FoundationPlistException:
        return False
    return True
Example #21
0
def writeSelfServiceManifest(optional_install_choices):
    '''Persist the self-serve manifest for managedsoftwareupdate.

    Returns True on success, False otherwise.'''
    manifest_path = WRITEABLE_SELF_SERVICE_MANIFEST_PATH
    try:
        FoundationPlist.writePlist(optional_install_choices, manifest_path)
        return True
    except FoundationPlist.FoundationPlistException:
        return False
Example #22
0
    def write_output_plist(self):
        """Write self.env out to self.outfile as a plist; no-op when the
        environment is unset. Write failures become ProcessorError."""
        if self.env is not None:
            try:
                FoundationPlist.writePlist(self.env, self.outfile)
            except BaseException as err:
                raise ProcessorError(err)
def clear_clients():
    """Clear clients.plist in locationd settings."""
    das_plist = '/private/var/db/locationd/clients.plist'
    # Read first so an unreadable plist still fails early (preserves the
    # original failure mode); the contents themselves are discarded.
    FoundationPlist.readPlist(das_plist)
    service_handler('unload')
    # BUG FIX: removed the dead auth_plist/clients_dict indirection --
    # the read result was immediately overwritten with an empty dict.
    FoundationPlist.writePlist({}, das_plist)
    os.chown(das_plist, 205, 205)
    service_handler('load')
	def create_munkipkginfo(self):
		"""Build a pkginfo for self.env["pkg_path"] using makepkginfo.

		Writes the resulting plist to RECIPE_CACHE_DIR/NAME.plist and
		records that path in self.env["pkginfo_path"]. Keys from
		self.env["pkginfo"] override the generated values, and a
		version_comparison_key is applied to each installs item when
		configured. Raises ProcessorError if makepkginfo fails or the
		plist cannot be written.
		"""
		# Set pkginfo plist path
		self.env["pkginfo_path"] = ("%s/%s.plist") % (self.env.get("RECIPE_CACHE_DIR"), self.env.get("NAME"))

		# Generate arguments for makepkginfo.
		args = ["/usr/local/munki/makepkginfo", self.env["pkg_path"]]
		if self.env.get("munkiimport_pkgname"):
			args.extend(["--pkgname", self.env["munkiimport_pkgname"]])
		if self.env.get("munkiimport_appname"):
			args.extend(["--appname", self.env["munkiimport_appname"]])
		if self.env.get("additional_makepkginfo_options"):
			args.extend(self.env["additional_makepkginfo_options"])
		if self.env.get("munkiimport_name"):
			args.extend(["--displayname", self.env["munkiimport_name"]])

		# Call makepkginfo.
		try:
			proc = subprocess.Popen(
				args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
			(out, err_out) = proc.communicate()
		except OSError as err:
			raise ProcessorError(
				"makepkginfo execution failed with error code %d: %s"
				% (err.errno, err.strerror))
		if proc.returncode != 0:
			raise ProcessorError(
				"creating pkginfo for %s failed: %s"
				% (self.env["pkg_path"], err_out))

		# Get pkginfo from output plist.
		pkginfo = FoundationPlist.readPlistFromString(out)

		# copy any keys from pkginfo in self.env
		if "pkginfo" in self.env:
			for key in self.env["pkginfo"]:
				pkginfo[key] = self.env["pkginfo"][key]

		# set an alternate version_comparison_key
		# if pkginfo has an installs item
		if "installs" in pkginfo and self.env.get("version_comparison_key"):
			for item in pkginfo["installs"]:
				if not self.env["version_comparison_key"] in item:
					raise ProcessorError(
						("version_comparison_key '%s' could not be found in "
						 "the installs item for path '%s'")
						% (self.env["version_comparison_key"], item["path"]))
				item["version_comparison_key"] = (
					self.env["version_comparison_key"])

		# Write the final pkginfo to disk.
		try:
			pkginfo_path = self.env["pkginfo_path"]
			FoundationPlist.writePlist(pkginfo, pkginfo_path)
		except OSError, err:
			raise ProcessorError("Could not write pkginfo %s: %s"
								 % (pkginfo_path, err.strerror))        
Example #25
0
def sysprefs_boxchk():
    """Enables location services in sysprefs globally"""
    uuid = ioreg()
    path_stub = "/private/var/db/locationd/Library/Preferences/ByHost/com.apple.locationd."
    das_plist = path_stub + uuid.strip() + ".plist"
    prefs = FoundationPlist.readPlist(das_plist)
    # Flip the flag (and fix ownership) only when it isn't already set.
    if prefs.get('LocationServicesEnabled', None) != 1:
        prefs['LocationServicesEnabled'] = 1
        FoundationPlist.writePlist(prefs, das_plist)
        os.chown(das_plist, 205, 205)
Example #26
0
def escrow_key(key, username, runtype):
    theurl = pref('ServerURL')+"/checkin/"
    serial = GetMacSerial()
    macname = GetMacName()
    mydata=[('serial',serial),('recovery_password',key),('username',username),('macname',macname)]
    mydata=urllib.urlencode(mydata)
    # req = Request(theurl, mydata)
    # try:
    #     response = urlopen(req)
    # except URLError, e:
    #     if hasattr(e, 'reason'):
    #         print 'We failed to reach a server.'
    #         print 'Reason: ', e.reason
    #         has_error = True
    #     elif hasattr(e, 'code'):
    #         print 'The server couldn\'t fulfill the request'
    #         print 'Error code: ', e.code
    #         has_error = True
    cmd = ['/usr/bin/curl', '-fsSL', '--data', mydata, theurl]
    task = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc = task.communicate()[0]

    if task.returncode != 0:
        has_error = True
    else:
        has_error = False
    if has_error:
        plistData = {}
        plistData['recovery_key']=key
        plistData['username']=username
        try:
            FoundationPlist.writePlist(plistData, '/private/var/root/recovery_key.plist')
        except:
            os.makedirs('/usr/local/crypt')
            FoundationPlist.writePlist(plistData, '/private/var/root/recovery_key.plist')

        os.chmod('/private/var/root/recovery_key.plist',0700)
        if runtype=="initial":
            the_command = "/sbin/reboot"
            reboot = subprocess.Popen(the_command,shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE).communicate()[0]

    else:
        ##need some code to read in the json response from the server, and if the data matches, display success message, or failiure message, then reboot. If not, we need to cache it on disk somewhere - maybe pull it out with facter?
        #time to turn on filevault
        #NSLog(u"%s" % fvprefs['ServerURL'])
        ##escrow successful, if the file exists, remove it
        thePlist = '/private/var/root/recovery_key.plist'

        if os.path.exists(thePlist):
            os.remove(thePlist)
        if runtype=="initial":
            the_command = "/sbin/reboot"
            reboot = subprocess.Popen(the_command,shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE).communicate()[0]
Example #27
0
def startUpdateCheck(suppress_apple_update_check=False):
    '''Does launchd magic to run managedsoftwareupdate as root.'''
    try:
        if not os.path.exists(UPDATECHECKLAUNCHFILE):
            plist = {}
            plist['SuppressAppleUpdateCheck'] = suppress_apple_update_check
            try:
                FoundationPlist.writePlist(plist, UPDATECHECKLAUNCHFILE)
            except FoundationPlist.FoundationPlistException, err:
                # problem creating the trigger file
                raise ProcessStartError(err)
    except (OSError, IOError), err:
        raise ProcessStartError(err)
Example #28
0
File: munki.py Project: munki/munki
def startUpdateCheck(suppress_apple_update_check=False):
    '''Does launchd magic to run managedsoftwareupdate as root.'''
    try:
        if not os.path.exists(UPDATECHECKLAUNCHFILE):
            plist = {}
            plist['SuppressAppleUpdateCheck'] = suppress_apple_update_check
            try:
                FoundationPlist.writePlist(plist, UPDATECHECKLAUNCHFILE)
            except FoundationPlist.FoundationPlistException, err:
                # problem creating the trigger file
                raise ProcessStartError(err)
    except (OSError, IOError), err:
        raise ProcessStartError(err)
Example #29
0
def startUpdateCheck(suppress_apple_update_check=False):
    """Does launchd magic to run managedsoftwareupdate as root.

    Returns 0 on success (or if a check is already pending), 1 on
    failure to create the trigger file."""
    try:
        if not os.path.exists(UPDATECHECKLAUNCHFILE):
            trigger = {
                "SuppressAppleUpdateCheck": suppress_apple_update_check}
            try:
                FoundationPlist.writePlist(trigger, UPDATECHECKLAUNCHFILE)
            except FoundationPlist.FoundationPlistException:
                return 1
        return 0
    except (OSError, IOError):
        return 1
Example #30
0
def writeFilteredUpdateCatalog(updatelist):
    '''Write out a sucatalog containing only the updates
    listed in updatelist. updatelist is a list of ProductIDs.'''
    # our locally-cached catalog
    catalogpath = os.path.join(
        swupdCacheDir(), 'content/catalogs/apple_index.sucatalog')
    catalog = FoundationPlist.readPlist(catalogpath)
    if 'Products' in catalog:
        # keep only the products named in updatelist
        catalog['Products'] = dict(
            (key, catalog['Products'][key]) for key in updatelist)
    filtered_catalogpath = os.path.join(
        swupdCacheDir(), 'content/catalogs/filtered_index.sucatalog')
    FoundationPlist.writePlist(catalog, filtered_catalogpath)
Example #31
0
def writeAppleUpdatesFile():
    '''Writes a file used by Managed Software Update.app to display
    available updates. Returns True if updates were written, False
    otherwise (any stale file is removed).'''
    appleUpdates = getSoftwareUpdateInfo()
    if not appleUpdates:
        try:
            os.unlink(appleUpdatesFile())
        except (OSError, IOError):
            pass
        return False
    FoundationPlist.writePlist(
        {'AppleUpdates': appleUpdates}, appleUpdatesFile())
    return True
Example #32
0
def writeFilteredUpdateCatalog(updatelist):
    '''Write out a sucatalog containing only the updates
    listed in updatelist. updatelist is a list of ProductIDs.'''
    # start from the locally-cached full Apple catalog
    source_path = os.path.join(swupdCacheDir(),
                               'content/catalogs/apple_index.sucatalog')
    catalog = FoundationPlist.readPlist(source_path)
    if 'Products' in catalog:
        kept = {}
        for product_id in updatelist:
            kept[product_id] = catalog['Products'][product_id]
        catalog['Products'] = kept
    target_path = os.path.join(swupdCacheDir(),
                               'content/catalogs/filtered_index.sucatalog')
    FoundationPlist.writePlist(catalog, target_path)
Example #33
0
def writeAppleUpdatesFile():
    '''Writes a file used by Managed Software Update.app to display
    available updates; removes any stale file when there are none.
    Returns True when updates were written.'''
    available = getSoftwareUpdateInfo()
    if available:
        payload = {'AppleUpdates': available}
        FoundationPlist.writePlist(payload, appleUpdatesFile())
        return True
    try:
        os.unlink(appleUpdatesFile())
    except (OSError, IOError):
        pass
    return False
Example #34
0
 def create_bundle_info(self, template):
     """Fill the bundle info template with version and installed-size
     data, then write it to self.env['infofile']."""
     info = template

     version = self.env['version']
     info["CFBundleShortVersionString"] = version
     parts = version.split(".")
     info["IFMajorVersion"] = parts[0]
     info["IFMinorVersion"] = parts[1]

     size, nfiles = self.get_pkgroot_size(self.env['pkgroot'])
     info["IFPkgFlagInstalledSize"] = size

     try:
         FoundationPlist.writePlist(info, self.env['infofile'])
     except BaseException as e:
         raise ProcessorError("Couldn't write %s: %s" % (self.env['infofile'], e))
Example #35
0
    def create_bundle_info(self, template):
        """Populate the info template with version and installed size and
        write it to self.env['infofile']."""
        info = template

        full_version = self.env['version']
        info["CFBundleShortVersionString"] = full_version
        major, minor = full_version.split(".")[0], full_version.split(".")[1]
        info["IFMajorVersion"] = major
        info["IFMinorVersion"] = minor

        size, nfiles = self.get_pkgroot_size(self.env['pkgroot'])
        info["IFPkgFlagInstalledSize"] = size

        try:
            FoundationPlist.writePlist(info, self.env['infofile'])
        except BaseException as e:
            raise ProcessorError("Couldn't write %s: %s" %
                                 (self.env['infofile'], e))
Example #36
0
def bulk(args):
    """Set a key on multiple pkginfo files.

    args.pkginfo is either a list of pkginfo paths, or a single path to
    a text file listing them (when it doesn't end in .plist/.pkginfo).
    A value of "-" removes the key instead of setting it.
    """
    # BUG FIX: compare with == rather than 'is'; identity comparison
    # against an int literal is implementation-dependent.
    if (len(args.pkginfo) == 1 and not args.pkginfo[0].endswith(
            (".plist", ".pkginfo"))):
        # File input
        paths_to_change = get_pkginfo_from_file(args.pkginfo[0])
    else:
        paths_to_change = args.pkginfo

    for path in paths_to_change:
        if os.path.exists(path):
            pkginfo = read_plist(path)
            if args.val == "-":
                remove_key(args.key, pkginfo)
            else:
                set_key(args.key, args.val, pkginfo)
            plistlib.writePlist(pkginfo, path)
Example #37
0
def add_maps():
    """Register Apple Maps as a client in locationd's clients.plist."""
    # Entry describing the Maps app for locationd ("com.apple.Maps").
    maps_entry = {
        "Hide": 0,
        "Whitelisted": 0,
        "BundleId": "com.apple.Maps",
        "BundlePath": "/Applications/Maps.app",
        "Registered": "",
        "Executable": "/Applications/Maps.app/Contents/MacOS/Maps",
        "Requirement": 'identifier "com.apple.Maps" and anchor apple',
        "Authorized": 1,
    }
    clients_path = '/private/var/db/locationd/clients.plist'
    clients = FoundationPlist.readPlist(clients_path)
    clients['com.apple.Maps'] = maps_entry
    FoundationPlist.writePlist(clients, clients_path)
    # 205:205 — presumably the _locationd user/group; verify on target OS.
    os.chown(clients_path, 205, 205)
Example #38
0
def store_profile_receipt_data(identifier, hash_value):
    '''Stores receipt info for a profile identifier.

    If hash_value is None, the identifier's entry is removed from the
    datastore; otherwise the file hash and install date are recorded.
    Write errors are reported via munkicommon.display_error, not raised.'''
    profile_data = profile_receipt_data()
    if hash_value is not None:
        profile_dict = profile_info_for_installed_identifier(identifier,
                                                             ignore_cache=True)
        install_date = profile_dict.get('ProfileInstallDate', 'UNKNOWN')
        profile_data[identifier] = {
            'FileHash': hash_value,
            'ProfileInstallDate': install_date
        }
    elif identifier in profile_data:
        del profile_data[identifier]
    try:
        FoundationPlist.writePlist(profile_data, profile_receipt_data_path())
    except Exception as err:
        # Exception, not BaseException, so KeyboardInterrupt/SystemExit
        # propagate; "as" syntax replaces the Python-2-only "except X, e".
        munkicommon.display_error(
            'Cannot update hash for %s: %s' % (identifier, err))
Example #39
0
def store_profile_receipt_data(identifier, hash_value):
    '''Stores info for profile identifier.
    If hash_value is None, item is removed from the datastore.
    Write failures are reported via munkicommon.display_error
    rather than raised to the caller.'''
    profile_data = profile_receipt_data()
    if hash_value is not None:
        # Record the hash plus the install date reported by the system
        # (bypassing any cached profile info).
        profile_dict = profile_info_for_installed_identifier(identifier,
                                                             ignore_cache=True)
        install_date = profile_dict.get('ProfileInstallDate', 'UNKNOWN')
        profile_data[identifier] = {
            'FileHash': hash_value,
            'ProfileInstallDate': install_date
        }
    elif identifier in profile_data.keys():
        # hash_value is None: drop the receipt entry entirely.
        del profile_data[identifier]
    try:
        FoundationPlist.writePlist(profile_data, profile_receipt_data_path())
    except BaseException, err:
        # NOTE(review): BaseException also catches KeyboardInterrupt /
        # SystemExit; Exception would be the safer choice here.
        munkicommon.display_error('Cannot update hash for %s: %s' %
                                  (identifier, err))
Example #40
0
File: ard_info.py Project: w0de/sal
def add_plugin_results(plugin, data, historical=False):
    """Add data to the shared plugin results plist.

    The shared results plist is created on first use; subsequent calls
    append to it.

    Args:
        plugin (str): Name of the plugin returning data.
        data (dict): Dictionary of results.
        historical (bool): Whether to keep only one record (False) or
            all results (True). Optional, defaults to False.
    """
    plist_path = '/usr/local/sal/plugin_results.plist'
    # Load existing results, or start a fresh list on first run.
    plugin_results = (FoundationPlist.readPlist(plist_path)
                      if os.path.exists(plist_path) else [])
    entry = {'plugin': plugin, 'historical': historical, 'data': data}
    plugin_results.append(entry)
    FoundationPlist.writePlist(plugin_results, plist_path)
Example #41
0
def process_overrides(recipes, args, production_cat, pkginfo_template):
    """Start main processing loop.

    Creates an override for each eligible recipe, pins it to the
    version currently in the production catalog, and applies template
    pkginfo values before writing the override back to disk.

    Args:
        recipes (list of str): Recipe names/ids to override.
        args: Parsed command-line options (override_dir,
            suppress_subdir, ...).
        production_cat (Plist): Munki's 'production' catalog.
        pkginfo_template (Plist): Template pkginfo settings to apply.
    """
    for recipe in recipes:
        print SEPARATOR

        # Skip recipes explicitly excluded from overriding.
        if recipe in RECIPE_EXCLUSIONS:
            print_error("Not overriding %s because it is in the list of "
                        "exclusions." % recipe)
            continue
        # A "local" prefix marks recipes that are themselves overrides.
        if recipe.startswith("local"):
            print_error("Not overriding %s because it _is_ an override." %
                        recipe)
            continue

        override_path = make_override(recipe, args.override_dir)
        # make_override returns None on failure; skip such recipes.
        if override_path is None:
            continue

        # Copy just-generated override's Input section to Input_Original.
        override = FoundationPlist.readPlist(override_path)
        override["Input_Original"] = override["Input"]
        override["Input"] = {}
        override["Input"]["pkginfo"] = {}

        # Pin to whatever version is currently in production.
        current_version = get_current_production_version(
            production_cat, override, args)
        apply_current_or_orig_values(override, current_version, args)

        if not args.suppress_subdir:
            copy_package_path_to_input(override, current_version, args)

        if pkginfo_template:
            apply_pkginfo_template(override, pkginfo_template)

        FoundationPlist.writePlist(override, override_path)
Example #42
0
def process_overrides(recipes, args, production_cat, pkginfo_template):
    """Start main processing loop.

    For each recipe that is not excluded and not already an override,
    generates an override pinned to the current production version and
    applies template pkginfo settings, then saves the override.

    Args:
        recipes (list of str): Recipe names/ids to override.
        args: Parsed command-line options (override_dir,
            suppress_subdir, ...).
        production_cat (Plist): Munki's 'production' catalog.
        pkginfo_template (Plist): Template pkginfo settings to apply.
    """
    for recipe in recipes:
        print SEPARATOR

        # Never override recipes on the exclusion list.
        if recipe in RECIPE_EXCLUSIONS:
            print_error("Not overriding %s because it is in the list of "
                        "exclusions." % recipe)
            continue
        # "local..." recipe ids are themselves overrides; skip them.
        if recipe.startswith("local"):
            print_error("Not overriding %s because it _is_ an override." %
                        recipe)
            continue

        override_path = make_override(recipe, args.override_dir)
        # None signals make_override failed for this recipe.
        if override_path is None:
            continue

        # Copy just-generated override's Input section to Input_Original.
        override = FoundationPlist.readPlist(override_path)
        override["Input_Original"] = override["Input"]
        override["Input"] = {}
        override["Input"]["pkginfo"] = {}

        # Determine and apply the version currently shipping in production.
        current_version = get_current_production_version(
            production_cat, override, args)
        apply_current_or_orig_values(override, current_version, args)

        if not args.suppress_subdir:
            copy_package_path_to_input(override, current_version, args)

        if pkginfo_template:
            apply_pkginfo_template(override, pkginfo_template)

        FoundationPlist.writePlist(override, override_path)
def sysprefs_boxchk():
    """Disable location services in sysprefs globally.

    Reads (creating if missing) locationd's per-host preferences plist;
    when LocationServicesEnabled is not already 0, unloads the service,
    writes the disabled setting, restores ownership (205:205 —
    presumably the _locationd user) and reloads the service.
    """
    uuid = get_hardware_uuid()
    perfdir = "/private/var/db/locationd/Library/Preferences/ByHost/"
    if not os.path.exists(perfdir):
        os.makedirs(perfdir)
    path_stub = "/private/var/db/locationd/Library/Preferences/ByHost/com.apple.locationd."
    das_plist = path_stub + uuid.strip() + ".plist"
    try:
        on_disk = FoundationPlist.readPlist(das_plist)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed; a missing or unreadable plist is
        # recreated empty and re-read.
        plist = {}
        FoundationPlist.writePlist(plist, das_plist)
        on_disk = FoundationPlist.readPlist(das_plist)
    val = on_disk.get('LocationServicesEnabled', None)
    if val != 0:
        # Stop locationd before editing its settings, then restart it.
        service_handler('unload')
        on_disk['LocationServicesEnabled'] = 0
        FoundationPlist.writePlist(on_disk, das_plist)
        os.chown(das_plist, 205, 205)
        service_handler('load')
Example #44
0
def build_bare_dmg(source, cache, logpath, loglevel, repo_path):
  """Build a bare OS DMG for Donation/bare usage.

  Args:
    source: Path to the base OS installer to image.
    cache: Directory for the AutoDMG template and the output DMG.
    logpath: Directory that receives the AutoDMG log file.
    loglevel: AutoDMG log level, passed through to its -L flag.
    repo_path: DeployStudio repo that receives the finished image.
  """
  dmg_output_path = os.path.join(cache, 'Bare.hfs.dmg')
  # Skip the (slow) build entirely when a cached image already exists.
  if os.path.isfile(dmg_output_path):
    print "Donation image already found, not building.\n"
    return
  print "Creating AutoDMG-donation.adtmpl."
  templatepath = os.path.join(cache, 'AutoDMG-bare.adtmpl')

  # Minimal AutoDMG template describing the bare image.
  plist = dict()
  plist["ApplyUpdates"] = True
  plist["SourcePath"] = source
  plist["TemplateFormat"] = "1.0"
  plist["VolumeName"] = "Macintosh HD"

  # Complete the AutoDMG-donation.adtmpl template
  plistlib.writePlist(plist, templatepath)
  autodmg_cmd = [
    '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
  ]
  if os.getuid() == 0:
    # We are running as root
    print "Running as root."
    autodmg_cmd.append('--root')

  logfile = os.path.join(logpath, 'bare.log')

  # Now kick off the AutoDMG build
  print "Building bare image..."
  if os.path.isfile(dmg_output_path):
    os.remove(dmg_output_path)
  cmd = autodmg_cmd + [
    '-L', loglevel,
    '-l', logfile,
    'build', templatepath,
    '--download-updates',
    '-o', dmg_output_path]
  run(cmd)
  print "Moving bare image to DS Repo."
  populate_ds_repo(dmg_output_path, repo_path)
Example #45
0
def release(args):
    """Set keys relevent to production deployment."""
    if (len(args.pkginfo) is 1 and not args.pkginfo[0].endswith(
        (".plist", ".pkginfo"))):
        # File input
        paths_to_change = get_pkginfo_from_file(args.pkginfo[0])
    else:
        paths_to_change = args.pkginfo

    if not args.date:
        date = None
    elif not is_valid_date(args.date):
        print "Invalid date! Please check formatting."
        sys.exit(1)
    else:
        date = get_datetime(args.date)

    for path in paths_to_change:
        if os.path.exists(path):
            pkginfo = read_plist(path)
            set_force_install_after_date(date, pkginfo)
            set_unattended_install(True, pkginfo)
            set_catalog("production", pkginfo)
            plistlib.writePlist(pkginfo, path)
Example #46
0
def build_bare_dmg(source, cache, logpath, loglevel, repo_path):
    """Build a bare OS DMG for Donation/bare usage.

    Args:
        source: Path to the base OS installer to image.
        cache: Directory for the AutoDMG template and the output DMG.
        logpath: Directory that receives the AutoDMG log file.
        loglevel: AutoDMG log level, passed through to its -L flag.
        repo_path: DeployStudio repo that receives the finished image.
    """
    dmg_output_path = os.path.join(cache, 'Bare.hfs.dmg')
    # Skip the (slow) build entirely when a cached image already exists.
    if os.path.isfile(dmg_output_path):
        print "Donation image already found, not building.\n"
        return
    print "Creating AutoDMG-donation.adtmpl."
    templatepath = os.path.join(cache, 'AutoDMG-bare.adtmpl')

    # Minimal AutoDMG template describing the bare image.
    plist = dict()
    plist["ApplyUpdates"] = True
    plist["SourcePath"] = source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = "Macintosh HD"

    # Complete the AutoDMG-donation.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = ['/Applications/AutoDMG.app/Contents/MacOS/AutoDMG']
    if os.getuid() == 0:
        # We are running as root
        print "Running as root."
        autodmg_cmd.append('--root')

    logfile = os.path.join(logpath, 'bare.log')

    # Now kick off the AutoDMG build
    print "Building bare image..."
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel, '-l', logfile, 'build', templatepath,
        '--download-updates', '-o', dmg_output_path
    ]
    run(cmd)
    print "Moving bare image to DS Repo."
    populate_ds_repo(dmg_output_path, repo_path)
Example #47
0
 def writePlist(self, data, pathname):
     """Write *data* to *pathname* as a plist.

     Raises:
         ProcessorError: if the plist cannot be written.
     """
     try:
         FoundationPlist.writePlist(data, pathname)
     except Exception as err:
         # "except E as err" replaces the Python-2-only "except E, err"
         # comma syntax; behavior is otherwise unchanged.
         raise ProcessorError(
             'Could not write %s: %s' % (pathname, err))
Example #48
0
 def save(self, filename=None):
     """Write the bookmark data to disk.

     Writes to *filename* when given (truthy); otherwise writes back to
     the path this object was loaded from (self.filename).
     """
     target = filename if filename else self.filename
     FoundationPlist.writePlist(self._bm, target)
Example #49
0
def main():
    """Main function."""
    wait_for_network()
    if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
        print "AutoDMG not at expected path in /Applications, quitting!"
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Built a precached AutoDMG image.')
    parser.add_argument('-c',
                        '--catalog',
                        help='Catalog name. Defaults to "prod".',
                        default='prod')
    parser.add_argument('-m',
                        '--manifest',
                        help='Manifest name. Defaults to "prod".',
                        default='prod')
    parser.add_argument('-o',
                        '--output',
                        help='Path to DMG to create.',
                        default='AutoDMG_full.hfs.dmg')
    parser.add_argument('--cache',
                        help='Path to local cache to store files.'
                        ' Defaults to "/Library/AutoDMG"',
                        default='/Library/AutoDMG')
    parser.add_argument('-d',
                        '--download',
                        help='Force a redownload of all files.',
                        action='store_true',
                        default=False)
    parser.add_argument('-l',
                        '--logpath',
                        help='Path to log files for AutoDMG.',
                        default='/Library/AutoDMG/logs/')
    parser.add_argument('--custom',
                        help='Path to place custom resources. Defaults to '
                        '/Library/Managed Installs/client_resources/.',
                        default='/Library/Managed Installs/client_resources/')
    parser.add_argument('-s',
                        '--source',
                        help='Path to base OS installer.',
                        default='/Applications/Install OS X El Capitan.app')
    parser.add_argument('-v',
                        '--volumename',
                        help='Name of volume after imaging. '
                        'Defaults to "Macintosh HD."',
                        default='Macintosh HD')
    parser.add_argument('--loglevel',
                        help='Set loglevel between 1 and 7. Defaults to 6.',
                        choices=range(1, 8),
                        default=6,
                        type=int)
    parser.add_argument('--dsrepo', help='Path to DeployStudio repo. ')
    parser.add_argument('--noicons',
                        help="Don't cache icons.",
                        action='store_true',
                        default=False)
    parser.add_argument('-u',
                        '--update',
                        help='Update the profiles plist.',
                        action='store_true',
                        default=False)
    parser.add_argument('--disableupdates',
                        help='Disable updates to built image via AutoDMG',
                        action='store_false',
                        default=True)
    parser.add_argument('--movefile',
                        help="Path to move file to after building.")
    parser.add_argument('--extras',
                        help='Path to JSON file containing additions '
                        ' and exceptions lists.')
    args = parser.parse_args()

    print "Using Munki repo: %s" % MUNKI_URL
    global CACHE
    CACHE = args.cache

    if "https" in MUNKI_URL and not BASIC_AUTH:
        print >> sys.stderr, "Error: HTTPS was used but no auth provided."
        sys.exit(2)

    print time.strftime("%c")
    print "Starting run..."
    # Create the local cache directories
    dir_struct = {
        'additions': os.path.join(CACHE, 'additions'),
        'catalogs': os.path.join(CACHE, 'catalogs'),
        'downloads': os.path.join(CACHE, 'downloads'),
        'exceptions': os.path.join(CACHE, 'exceptions'),
        'manifests': os.path.join(CACHE, 'manifests'),
        'icons': os.path.join(CACHE, 'icons'),
        'logs': os.path.join(CACHE, 'logs')
    }
    path_creation = prepare_local_paths(dir_struct.values())
    if path_creation > 0:
        print "Error setting up local cache directories."
        sys.exit(-1)

    # These are necessary to populate the globals used in updatecheck
    keychain_obj = keychain.MunkiKeychain()
    manifestpath = updatecheck.getPrimaryManifest(args.manifest)
    updatecheck.getPrimaryManifestCatalogs(args.manifest)
    updatecheck.getCatalogs([args.catalog])

    installinfo = {}
    installinfo['processed_installs'] = []
    installinfo['processed_uninstalls'] = []
    installinfo['managed_updates'] = []
    installinfo['optional_installs'] = []
    installinfo['managed_installs'] = []
    installinfo['removals'] = []
    updatecheck.processManifestForKey(manifestpath, 'managed_installs',
                                      installinfo)
    # installinfo['managed_installs'] now contains a list of all managed_installs
    install_list = []
    for item in installinfo['managed_installs']:
        detail = updatecheck.getItemDetail(item['name'], [args.catalog])
        if detail:
            install_list.append(detail)

    # Prior to downloading anything, populate the lists
    additions_list = list()
    item_list = list()
    except_list = list()
    exceptions = list()
    # exceptions[] is a list of exceptions specified by the extras file
    # except_list is a list of files downloaded into the exceptions dir
    if args.extras:
        # Additions are downloaded & added to the additions_list
        # Exceptions are added to the exceptions list,
        # Downloaded exceptions are added to the except_list list.
        handle_extras(args.extras, dir_struct['exceptions'],
                      dir_struct['additions'], args.download, exceptions,
                      except_list, additions_list)

    # Check for managed_install items and download them
    process_managed_installs(install_list, exceptions, except_list, item_list,
                             dir_struct['exceptions'], dir_struct['downloads'],
                             args.download)

    # Icon handling
    if not args.noicons:
        # Get icons for Managed Updates, Optional Installs and removals
        updatecheck.processManifestForKey(manifestpath, 'managed_updates',
                                          installinfo)
        updatecheck.processManifestForKey(manifestpath, 'managed_uninstalls',
                                          installinfo)
        updatecheck.processManifestForKey(manifestpath, 'optional_installs',
                                          installinfo)
        icon_pkg_file = handle_icons(dir_struct['icons'], installinfo)
    if icon_pkg_file:
        additions_list.extend([icon_pkg_file])

    # Munki custom resources handling
    custom_pkg_file = handle_custom(args.custom)
    if custom_pkg_file:
        additions_list.extend([custom_pkg_file])

    # Clean up cache of items we don't recognize
    cleanup_local_cache(item_list, dir_struct['downloads'])
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    # Build the package of exceptions, if any
    if except_list:
        pkg_output_file = os.path.join(CACHE, 'munki_cache.pkg')
        success = build_pkg(dir_struct['exceptions'], 'munki_cache',
                            'com.facebook.cpe.munki_exceptions',
                            '/Library/Managed Installs/Cache', CACHE,
                            'Building exceptions package')
        if success:
            additions_list.extend([pkg_output_file])
        else:
            print "Failed to build exceptions package!"

    loglevel = str(args.loglevel)

    # Run any extra code or package builds
    sys.stdout.flush()
    pkg_list = autodmg_org.run_unique_code(args)
    additions_list.extend(pkg_list)

    # Now that cache is downloaded, let's add it to the AutoDMG template.
    print "Creating AutoDMG-full.adtmpl."
    templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = args.disableupdates
    plist["SourcePath"] = args.source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = args.volumename
    plist["AdditionalPackages"] = [
        os.path.join(dir_struct['downloads'], f)
        for f in os.listdir(dir_struct['downloads'])
        if (not f == '.DS_Store') and (f not in additions_list)
    ]

    if additions_list:
        plist["AdditionalPackages"].extend(additions_list)

    # Complete the AutoDMG-full.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = ['/Applications/AutoDMG.app/Contents/MacOS/AutoDMG']
    if os.getuid() == 0:
        # We are running as root
        print "Running as root."
        autodmg_cmd.append('--root')
    if args.update:
        # Update the profiles plist too
        print "Updating UpdateProfiles.plist..."
        cmd = autodmg_cmd + ['update']
        run(cmd)

    logfile = os.path.join(args.logpath, 'build.log')
    # Now kick off the AutoDMG build
    dmg_output_path = os.path.join(CACHE, args.output)
    sys.stdout.flush()
    print "Building disk image..."
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel, '-l', logfile, 'build', templatepath,
        '--download-updates', '-o', dmg_output_path
    ]
    print "Full command: %s" % cmd
    run(cmd)
    if not os.path.isfile(dmg_output_path):
        print >> sys.stderr, "Failed to create disk image!"
        sys.exit(1)

    # Check the Deploystudio masters to see if this image already exists
    sys.stdout.flush()
    if args.dsrepo:
        populate_ds_repo(dmg_output_path, args.dsrepo)

    if args.movefile:
        move_file(dmg_output_path, args.movefile)

    print "Ending run."
    print time.strftime("%c")
Example #50
0
def main():
    """Build a precached AutoDMG image from a Munki repo.

    Caches the manifest's managed installs, icons, custom resources and
    any extras locally, builds each exception into its own package,
    renders an AutoDMG template from the cache and runs the AutoDMG
    build, optionally publishing the DMG to a DeployStudio repo.
    """
    wait_for_network()
    if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
        print("AutoDMG not at expected path in /Applications, quitting!")
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Built a precached AutoDMG image.')
    parser.add_argument(
        '-m', '--manifest', help='Manifest name. Defaults to "prod".',
        default='prod')
    parser.add_argument(
        '-o', '--output', help='Path to DMG to create.',
        default='AutoDMG_full.hfs.dmg')
    parser.add_argument(
        '--cache', help=(
            'Path to local cache to store files. '
            'Defaults to "/Library/AutoDMG"'),
        default='/Library/AutoDMG')
    parser.add_argument(
        '-d', '--download', help='Force a redownload of all files.',
        action='store_true', default=False)
    parser.add_argument(
        '-l', '--logpath', help='Path to log files for AutoDMG.',
        default='/Library/AutoDMG/logs/')
    parser.add_argument(
        '-s', '--source', help='Path to base OS installer.',
        default='/Applications/Install OS X El Capitan.app')
    parser.add_argument(
        '-n', '--volumename', help=(
            'Name of volume after imaging. '
            'Defaults to "Macintosh HD."'),
        default='Macintosh HD')
    parser.add_argument(
        '-S', '--volumesize', help=(
            'Size of volume after imaging. '
            'Defaults to 120'),
        default=120)
    parser.add_argument(
        '--loglevel', help='Set loglevel between 1 and 7. Defaults to 6.',
        choices=range(1, 8), type=int, default=6)
    parser.add_argument(
        '--dsrepo', help='Path to DeployStudio repo. ')
    parser.add_argument(
        '--noicons', help="Don't cache icons.",
        action='store_true', default=False)
    parser.add_argument(
        '-u', '--update', help='Update the profiles plist.',
        action='store_true', default=False)
    parser.add_argument(
        '--extras', help=(
            'Path to JSON file containing additions '
            ' and exceptions lists.')
    )
    args = parser.parse_args()

    print("Using Munki repo: %s" % MUNKI_URL)
    global CACHE
    CACHE = args.cache

    print(time.strftime("%c"))
    print("Starting run...")
    # Create the local cache directories
    dir_struct = {
        'additions': os.path.join(CACHE, 'additions'),
        'catalogs': os.path.join(CACHE, 'catalogs'),
        'downloads': os.path.join(CACHE, 'downloads'),
        'exceptions': os.path.join(CACHE, 'exceptions'),
        'exceptions_pkgs': os.path.join(CACHE, 'exceptions_pkgs'),
        'manifests': os.path.join(CACHE, 'manifests'),
        'icons': os.path.join(CACHE, 'icons'),
        'logs': os.path.join(CACHE, 'logs'),
        'client_resources': os.path.join(CACHE, 'client_resources'),
    }
    path_creation = prepare_local_paths(dir_struct.values())
    if path_creation > 0:
        print("Error setting up local cache directories.")
        sys.exit(-1)

    # Populate the list of installs based on the manifest
    install_list = gather_install_list(args.manifest)

    # Prior to downloading anything, populate the other lists
    additions_list = list()
    item_list = list()
    except_list = list()
    exceptions = list()
    # exceptions[] is a list of exceptions specified by the extras file
    # except_list[] is a list of files downloaded into the exceptions dir
    if args.extras:
        # Additions are downloaded & added to the additions_list
        # Exceptions are added to the exceptions list,
        # Downloaded exceptions are added to the except_list list.
        handle_extras(
            args.extras,
            dir_struct['exceptions'],
            dir_struct['additions'],
            args.download,
            exceptions,
            except_list,
            additions_list
        )

    # Check for managed_install items and download them
    process_managed_installs(
        install_list,
        exceptions,
        except_list,
        item_list,
        dir_struct['exceptions'],
        dir_struct['downloads'],
        args.download
    )

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    # Icon handling
    # FIX: initialize so the check below is safe when --noicons is passed
    # (icon_pkg_file was previously unbound in that case -> NameError).
    icon_pkg_file = None
    if not args.noicons:
        # Download all icons from the catalogs used by the manifest
        catalog_item_list = []
        for catalog in os.listdir(dir_struct['catalogs']):
            catalog_item_list += plistlib.readPlist(
                os.path.join(dir_struct['catalogs'], catalog)
            )
        icon_pkg_file = handle_icons(catalog_item_list)
    if icon_pkg_file:
        additions_list.extend([icon_pkg_file])

    # Munki custom resources handling
    custom_pkg_file = handle_custom()
    if custom_pkg_file:
        additions_list.extend([custom_pkg_file])

    # Build each exception into its own package
    sys.stdout.flush()
    exceptions_pkg_list = build_exceptions(CACHE)
    additions_list.extend(exceptions_pkg_list)

    loglevel = str(args.loglevel)

    # Run any extra code or package builds
    sys.stdout.flush()
    pkg_list = autodmg_org.run_unique_code(args)
    additions_list.extend(pkg_list)

    # Now that cache is downloaded, let's add it to the AutoDMG template.
    print("Creating AutoDMG-full.adtmpl.")
    templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = True
    plist["SourcePath"] = args.source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = args.volumename
    plist["VolumeSize"] = args.volumesize
    plist["AdditionalPackages"] = [
        os.path.join(
            dir_struct['downloads'], f
        ) for f in os.listdir(
            dir_struct['downloads']
        ) if (not f == '.DS_Store') and (f not in additions_list)
    ]

    if additions_list:
        plist["AdditionalPackages"].extend(additions_list)

    # Complete the AutoDMG-full.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = [
        '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
    ]
    if os.getuid() == 0:
        # We are running as root
        print("Running as root.")
        autodmg_cmd.append('--root')
    if args.update:
        # Update the profiles plist too
        print("Updating UpdateProfiles.plist...")
        cmd = autodmg_cmd + ['update']
        run(cmd)

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    logfile = os.path.join(args.logpath, 'build.log')
    # Now kick off the AutoDMG build
    dmg_output_path = os.path.join(CACHE, args.output)
    sys.stdout.flush()
    print("Building disk image...")
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel,
        '-l', logfile,
        'build', templatepath,
        '--download-updates',
        '-o', dmg_output_path]
    print("Full command: %s" % cmd)
    run(cmd)
    if not os.path.isfile(dmg_output_path):
        print("Failed to create disk image!", file=sys.stderr)
        sys.exit(1)

    sys.stdout.flush()
    if args.dsrepo:
        # Check the Deploystudio masters to see if this image already exists
        populate_ds_repo(dmg_output_path, args.dsrepo)

    print("Ending run.")
    print(time.strftime("%c"))
Example #51
0
        extension = self.env.get("MUNKI_PKGINFO_FILE_EXTENSION", "plist")
        if len(extension) > 0:
            extension = '.' + extension.strip(".")
        pkginfo_name = "%s-%s%s" % (pkginfo["name"],
                                    pkginfo["version"].strip(), extension)
        pkginfo_path = os.path.join(destination_path, pkginfo_name)
        index = 0
        while os.path.exists(pkginfo_path):
            index += 1
            pkginfo_name = "%s-%s__%s%s" % (
                pkginfo["name"], pkginfo["version"], index, extension)
            pkginfo_path = os.path.join(destination_path, pkginfo_name)

        try:
            FoundationPlist.writePlist(pkginfo, pkginfo_path)
        except OSError, err:
            raise ProcessorError("Could not write pkginfo %s: %s" %
                                 (pkginfo_path, err.strerror))
        return pkginfo_path

    def main(self):
        """Reset any previous summary result and assemble the
        makepkginfo command line for self.env['pkg_path'].

        NOTE(review): only this opening portion of the method is
        visible in this chunk.
        """
        # clear any pre-existing summary result
        if 'munki_importer_summary_result' in self.env:
            del self.env['munki_importer_summary_result']
        # Generate arguments for makepkginfo.
        args = ["/usr/local/munki/makepkginfo", self.env["pkg_path"]]
        # Optional overrides for the detected package / app name.
        if self.env.get("munkiimport_pkgname"):
            args.extend(["--pkgname", self.env["munkiimport_pkgname"]])
        if self.env.get("munkiimport_appname"):
            args.extend(["--appname", self.env["munkiimport_appname"]])
    def main(self):
        """Run a Creative Cloud Packager automation build.

        Skips the build entirely when an identical automation manifest was
        already built into the recipe cache. On completion, populates
        pkg_path, uninstaller_pkg_path, ccp_version, optional ccp_path and
        package_info_text, plus the summary result dict.

        Raises ProcessorError if CCP is already running, reports an error,
        returns an unexpected result, or fails to produce the package.
        """
        if self.env.get("ALLOW_CCDA_INSTALLED", False):
            self.check_ccda_installed()

        # establish some of our expected build paths
        expected_output_root = os.path.join(self.env["RECIPE_CACHE_DIR"], self.env["package_name"])
        self.env["pkg_path"] = os.path.join(expected_output_root, "Build/%s_Install.pkg" % self.env["package_name"])
        self.env["uninstaller_pkg_path"] = os.path.join(expected_output_root,
                                                        "Build/%s_Uninstall.pkg" % self.env["package_name"])

        saved_automation_xml_path = os.path.join(expected_output_root,
                                                 '.ccp_automation_input.xml')
        automation_manifest_plist_path = os.path.join(expected_output_root,
                                                      '.autopkg_manifest.plist')
        self.set_customer_type(self.env['ccpinfo'])

        # ccpinfo Needs pre-processing before comparison to old run
        new_manifest = self.automation_xml()

        # Handle any pre-existing package at the expected location, and end
        # early if it matches our input manifest
        if os.path.exists(automation_manifest_plist_path):
            existing_manifest = FoundationPlist.readPlist(automation_manifest_plist_path)
            self.output("Found existing CCP package build automation info, comparing")
            self.output("existing build: %s" % existing_manifest)
            self.output("current build: %s" % new_manifest)
            if new_manifest == existing_manifest:
                self.output("Returning early because we have an existing package "
                            "with the same parameters.")
                return

        # Going forward with building, set up or clear needed directories
        xml_workdir = os.path.join(self.env["RECIPE_CACHE_DIR"], 'automation_xml')
        if not os.path.exists(xml_workdir):
            os.mkdir(xml_workdir)
        if os.path.isdir(expected_output_root):
            shutil.rmtree(expected_output_root)

        # using .xml as a suffix because CCP's automation mode creates a
        # '<input>_result.xml' file with the assumption that the input ends
        # in '.xml'
        xml_path = os.path.join(xml_workdir, 'ccp_automation_%s.xml' % self.env['package_name'])
        with open(xml_path, 'w') as fd:
            fd.write(new_manifest)

        if self.is_ccp_running():
            raise ProcessorError(
                "You cannot start a Creative Cloud Packager automation workflow " +
                "if Creative Cloud Packager is already running. Please quit CCP and start this recipe again."
            )

        self.check_and_disable_appnap_for_pdapp()

        cmd = [
            '/Applications/Utilities/Adobe Application Manager/core/Adobe Application Manager.app/Contents/MacOS/PDApp',
            '--appletID=CCP_UI',
            '--appletVersion=1.0',
            '--workflow=ccp',
            '--automationMode=ccp_automation',
            '--pkgConfigFile=%s' % xml_path]
        self.output("Executing CCP build command: %s" % " ".join(cmd))
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, _ = proc.communicate()
        if out:
            self.output("CCP Output: %s" % out)
        exitcode = proc.returncode
        self.output("CCP Exited with status {}".format(exitcode))

        # CCP writes '<input>_result.xml' alongside the input XML.
        # os.path.splitext already preserves the directory component, so the
        # previous extra os.path.join(os.path.dirname(...), ...) was
        # redundant (and wrong for relative paths).
        results_file = os.path.splitext(xml_path)[0] + '_result.xml'
        results_elem = ElementTree.parse(results_file).getroot()
        if results_elem.find('error') is not None:
            # Build an AutoPkg error message with help to diagnose
            # possible build failures
            autopkg_error_msg = "CCP package build reported failure.\n"
            err_msg_type = results_elem.find('error/errorMessage')
            if err_msg_type is not None:
                autopkg_error_msg += "Error type: '%s' - " % err_msg_type.text
                # Fixed: only consult CCP_ERROR_MSGS when the errorMessage
                # element actually exists; previously a missing element
                # raised AttributeError here instead of reporting the error.
                if err_msg_type.text in CCP_ERROR_MSGS:
                    autopkg_error_msg += CCP_ERROR_MSGS[err_msg_type.text] + "\n"
            with open(results_file, 'r') as results_fd:
                results_contents = results_fd.read()
            autopkg_error_msg += (
                "Please inspect the PDApp log file at: %s. 'results' XML file "
                "contents follow: \n%s" % (
                    os.path.expanduser("~/Library/Logs/PDApp.log"),
                    results_contents))

            raise ProcessorError(autopkg_error_msg)

        if results_elem.find('success') is None:
            with open(results_file, 'r') as results_fd:
                results_contents = results_fd.read()
            raise ProcessorError(
                "Unexpected result from CCP, 'results' XML file contents follow: \n{}".format(
                    results_contents
                )
            )

        # Sanity-check that we really do have our install package!
        if not os.path.exists(self.env["pkg_path"]):
            raise ProcessorError(
                "CCP exited successfully, but no expected installer package "
                "at %s exists." % self.env["pkg_path"])

        # Save both the automation XML for posterity and our manifest plist
        # for later comparison
        shutil.copy(xml_path, saved_automation_xml_path)
        # TODO: we aren't scrubbing the automation XML file at all
        FoundationPlist.writePlist(
            self.env['ccpinfo'],
            automation_manifest_plist_path)

        # Save PackageInfo.txt
        packageinfo = os.path.join(expected_output_root, "PackageInfo.txt")
        if os.path.exists(packageinfo):
            with open(packageinfo, 'r') as info_fd:
                self.env["package_info_text"] = info_fd.read()

        ccp_path = os.path.join(expected_output_root, 'Build/{}.ccp'.format(self.env["package_name"]))
        if os.path.exists(ccp_path):
            self.env["ccp_path"] = ccp_path

        option_xml_root = ElementTree.parse(os.path.join(
            self.env["pkg_path"], 'Contents/Resources/optionXML.xml')).getroot()

        # Save the CCP build version
        self.env["ccp_version"] = ""
        ccp_version = option_xml_root.find("prodVersion")
        if ccp_version is None:
            self.output(
                "WARNING: Didn't find expected 'prodVersion' key (CCP "
                "version) in optionXML.xml")
        else:
            # Fixed: only dereference .text when the element exists;
            # previously this was unconditional and raised AttributeError
            # right after logging the warning above.
            self.env["ccp_version"] = ccp_version.text

        if len(self.env['ccpinfo']['Products']) == 1:
            built_products = self.env['ccpinfo']['Products'][0]['sapCode']
        else:
            built_products = '(multiple)'
        self.env["creative_cloud_packager_summary_result"] = {
            'summary_text': 'The following CCP packages were built:',
            'report_fields': ['display_name', 'product_id', 'version', 'pkg_path'],
            'data': {
                'display_name': self.env['display_name'],
                'product_id': built_products,
                'version': self.env['version'],
                'pkg_path': self.env['pkg_path'],
            }
        }
 def synchronize(self):
     """Flush the in-memory plist back to its file on disk.

     Write-through persistence: callers mutate ``internalPlist`` and then
     call synchronize() to commit the change to ``plistPath``.
     """
     contents, destination = self.internalPlist, self.plistPath
     plistlib.writePlist(contents, destination)
Example #54
0
def cacheSwupdMetadata():
    '''Copies ServerMetadata (.smd), Metadata (.pkm),
    and Distribution (.dist) files for the available updates
    to the local machine and writes a new sucatalog that refers
    to the local copies of these files.

    Reads the previously-filtered catalog from the local swupd cache,
    replicates each product's metadata files (but not the package payloads
    themselves), then writes two rewritten catalogs: one with only the
    metadata URLs rewritten, and one with package URLs rewritten as well.
    '''
    filtered_catalogpath = os.path.join(swupdCacheDir(),
            'content/catalogs/filtered_index.sucatalog')
    catalog = FoundationPlist.readPlist(filtered_catalogpath)
    if 'Products' in catalog:
        product_keys = list(catalog['Products'].keys())
        for product_key in product_keys:
            munkicommon.display_status(
                'Caching metadata for product ID %s', product_key)
            product = catalog['Products'][product_key]
            # Server metadata is optional per product, hence the key check.
            if 'ServerMetadataURL' in product:
                unused_path = replicateURLtoFilesystem(
                    product['ServerMetadataURL'],
                    copy_only_if_missing=True)

            for package in product.get('Packages', []):
                ### not replicating the packages themselves ###
                #if 'URL' in package:
                #    unused_path = replicateURLtoFilesystem(
                #        package['URL'],
                #        copy_only_if_missing=fast_scan)
                if 'MetadataURL' in package:
                    munkicommon.display_status(
                        'Caching package metadata for product ID %s',
                         product_key)
                    unused_path = replicateURLtoFilesystem(
                        package['MetadataURL'],
                        copy_only_if_missing=True)

            # NOTE(review): 'Distributions' is accessed unguarded — assumes
            # every product in the filtered catalog has this key; a KeyError
            # here would abort the whole cache run. TODO confirm upstream
            # catalogs always include it.
            distributions = product['Distributions']
            for dist_lang in distributions.keys():
                munkicommon.display_status(
                    'Caching %s distribution for product ID %s',
                    dist_lang, product_key)
                dist_url = distributions[dist_lang]
                unused_path = replicateURLtoFilesystem(
                    dist_url,
                    copy_only_if_missing=True)

        # rewrite URLs to point to local resources
        rewriteURLs(catalog, rewrite_pkg_urls=False)
        # write out the rewritten catalog
        localcatalogpath = os.path.join(swupdCacheDir(),
                                        'content', 'catalogs')
        if not os.path.exists(localcatalogpath):
            try:
                os.makedirs(localcatalogpath)
            except OSError, oserr:
                raise ReplicationError(oserr)
        # First catalog: written after rewriting with rewrite_pkg_urls=False.
        localcatalogpathname = os.path.join(localcatalogpath,
                                            'local_download.sucatalog')
        FoundationPlist.writePlist(catalog, localcatalogpathname)

        # Second catalog: same catalog object rewritten again, this time
        # with rewrite_pkg_urls=True.
        rewriteURLs(catalog, rewrite_pkg_urls=True)
        localcatalogpathname = os.path.join(localcatalogpath,
                                            'local_install.sucatalog')
        FoundationPlist.writePlist(catalog, localcatalogpathname)
Example #55
0
                os.makedirs(destination_path)
            except OSError, err:
                raise ProcessorError("Could not create %s: %s"
                                      % (destination_path, err.strerror))

        pkginfo_name = "%s-%s.plist" % (pkginfo["name"], pkginfo["version"])
        pkginfo_path = os.path.join(destination_path, pkginfo_name)
        index = 0
        while os.path.exists(pkginfo_path):
            index += 1
            pkginfo_name = "%s-%s__%s.plist" % (
                pkginfo["name"], pkginfo["version"], index)
            pkginfo_path = os.path.join(destination_path, pkginfo_name)

        try:
            FoundationPlist.writePlist(pkginfo, pkginfo_path)
        except OSError, err:
            raise ProcessorError("Could not write pkginfo %s: %s"
                                 % (pkginfo_path, err.strerror))
        return pkginfo_path
    
    def main(self):
        """Assemble the makepkginfo command line from recipe input variables.

        NOTE(review): this excerpt appears truncated — the method ends after
        building ``args`` without ever running the command; presumably the
        original continues past this chunk. Confirm against the full source.
        """

        # Generate arguments for makepkginfo.
        args = ["/usr/local/munki/makepkginfo", self.env["pkg_path"]]
        # Optional overrides: only appended when the recipe supplied them.
        if self.env.get("munkiimport_pkgname"):
            args.extend(["--pkgname", self.env["munkiimport_pkgname"]])
        if self.env.get("munkiimport_appname"):
            args.extend(["--appname", self.env["munkiimport_appname"]])
        # Free-form extra options are passed through verbatim.
        if self.env.get("additional_makepkginfo_options"):
            args.extend(self.env["additional_makepkginfo_options"])
def main():
   """Walk the Munki pkgsinfo tree and promote pkginfo files between
   catalogs once their last promotion is older than
   ``days_between_promotions`` days.

   Relies on module-level configuration: MUNKI_ROOT_PATH,
   MUNKI_PKGSINFO_DIR_NAME, days_between_promotions, promotion_order and
   makecatalogs. Runs makecatalogs at the end regardless of changes.
   """
   # Join paths based on what's user-defined
   pkgsinfo_path=os.path.join(MUNKI_ROOT_PATH, MUNKI_PKGSINFO_DIR_NAME)

   # Check that the path for the pkgsinfo exists
   if not os.path.isdir(pkgsinfo_path):
      logging.error("Your pkgsinfo path is not valid. Please check your MUNKI_ROOT_PATH and MUNKI_PKGSINFO_DIR_NAME values.")
   else:
      # Make sure the relevant folder is writable
      check_folder_writable(pkgsinfo_path)

      # Today's date, also used as the new promotion-date stamp
      current_date=datetime.date.today()

      # Files last promoted on or before this date are due for promotion
      threshold_date=datetime.date.today() + datetime.timedelta(days=-days_between_promotions)

      ## Loop through all the pkginfo files and see if they need to be promoted
      for root, dirs, files in os.walk(pkgsinfo_path):
         # Prune hidden directories in place so os.walk skips them.
         # Fixed: the previous version called dirs.remove() while iterating
         # over dirs, which skips the element following each removal.
         dirs[:] = [d for d in dirs if not d.startswith(".")]
         for filename in files:
            # Skip files that start with a period
            if filename.startswith("."):
               continue
            fullfile = os.path.join(root, filename)
            logging.info("Now processing %s" % fullfile)
            pkginfo = FoundationPlist.readPlist(fullfile)
            existing_catalogs = pkginfo['catalogs']

            # Tracks whether this particular pkginfo needs writing back
            pkginfo_changed=False

            # Check for _metadata key stolen from Jesse Peterson's https://github.com/jessepeterson/autopromoter/blob/master/autopromoter.py
            if '_metadata' not in pkginfo.keys():
               # create _metadata key if it doesn't exist. this is to catch older
               # pkginfos that didn't automatically generate this field
               pkginfo['_metadata'] = {}
               logging.info("Creating _metadata dictionary for %s" % fullfile)
               pkginfo_changed=True
            # Either way, also make sure there is a promotion date that exists, too
            # The idea here is that if MunkiAutopromote didn't already put in a promotion date, we should put one in now and then it'll be caught the next time around
            if 'catalog_promotion_date' not in pkginfo['_metadata']:
               pkginfo['_metadata']['catalog_promotion_date'] = str(current_date)
               logging.info("Creating promotion date of %s for %s" % (current_date, fullfile))
               pkginfo_changed=True
            # No point checking if it was X days ago if we just put it in
            else:
               # See if the last promotion date was days_between_promotion days ago or more
               last_catalog_promotion_date=datetime.datetime.strptime(pkginfo['_metadata']['catalog_promotion_date'], "%Y-%m-%d").date()
               # In addition to comparing dates, we also don't want to bother with any pkginfo files that have the full set of catalogs already
               if last_catalog_promotion_date <= threshold_date and sorted(existing_catalogs)!=sorted(promotion_order):
                  logging.info("Last promotion date was more than %s days ago for %s" % (days_between_promotions, fullfile))
                  # Promote!
                  for catalog in promotion_order:
                     if catalog not in existing_catalogs:
                        logging.info("Adding %s catalog for %s" % (catalog, fullfile))
                        # Add the catalog to the list of existing ones
                        pkginfo['catalogs'].append(catalog)
                        # Update the promotion date
                        pkginfo['_metadata']['catalog_promotion_date']=str(current_date)
                        pkginfo_changed=True
                        # Break the for loop; otherwise, it will promote beyond just this one
                        break
            if pkginfo_changed:
               # Write the changes back
               FoundationPlist.writePlist(pkginfo, fullfile)

      # I guess we could have a variable that checks to see if there were any changes before running makecatalogs. For now, it really doesn't hurt to run it, even if there weren't any changes.
      if os.path.exists(makecatalogs):
         logging.info("Running makecatalogs")
         os.system(makecatalogs)
      else:
         logging.error("%s could not be found. When you have a chance, run makecatalogs on your Munki repo to have the changes reflected." % makecatalogs)
Example #57
0
def main():
    """Command-line entry point: build a .mobileconfig profile from either
    a Directory Services MCX object or one or more preference plists.

    Exactly one of --dsobject / --plist and exactly one of --identifier /
    --identifier-from-profile must be given; writes the resulting profile
    with FoundationPlist at the end.
    """
    parser = optparse.OptionParser()
    parser.set_usage(
        """usage: %prog [--dsobject DSOBJECT | --plist PLIST] 
                       [--identifier IDENTIFIER | --identifier-from-profile PATH] [options]
       One of '--dsobject' or '--plist' must be specified, and only one identifier option.
       Run '%prog --help' for more information.""")

    # Required options
    parser.add_option('--dsobject', '-d', metavar='DSOBJECT',
        help="""Directory Services object from which to convert MCX data.
Examples: /Local/Default/Computers/foo
          /LDAPv3/some_ldap_server/ComputerGroups/bar""")
    parser.add_option('--plist', '-p', action="append", metavar='PLIST_FILE',
        help="""Path to a plist to be added as a profile payload.
Can be specified multiple times.""")
    parser.add_option('--identifier', '-i',
        action="store",
        help="""Top-level payload identifier. This is used to uniquely identify a profile.
A profile can be removed using this identifier using the 'profiles' command and the '-R -p' options.""")
    parser.add_option('--identifier-from-profile', '-f',
        action="store",
        metavar="PATH",
        help="""Path to an existing .mobileconfig file from which to copy the identifier,
as opposed to specifying it with the --identifier option.""")

    # Optionals
    parser.add_option('--removal-allowed', '-r',
        action="store_true",
        default=False,
        help="""Specifies that the profile can be removed.""")
    parser.add_option('--organization', '-g',
        action="store",
        default="",
        help="Cosmetic name for the organization deploying the profile.")
    parser.add_option('--output', '-o',
        action="store",
        metavar='PATH',
        help="Output path for profile. Defaults to 'identifier.mobileconfig' in the current working directory.")

    # Plist-specific
    plist_options = optparse.OptionGroup(parser,
        title="Plist-specific options",
        description="""These options are useful only in conjunction with --plist.
If multiple plists are supplied, they are applied to all, not on a
per-plist basis.""")

    parser.add_option_group(plist_options)

    plist_options.add_option('--manage', '-m',
        action="store",
        help="Management frequency - Once, Often or Always. Defaults to Always.")

    options, args = parser.parse_args()

    # This tool accepts no positional arguments at all.
    if len(args):
        parser.print_usage()
        sys.exit(-1)

    # --dsobject and --plist are mutually exclusive input sources.
    if options.dsobject and options.plist:
        parser.print_usage()
        errorAndExit("Error: The '--dsobject' and '--plist' options are mutually exclusive.")

    if options.dsobject and options.manage:
        # NOTE(review): this bare print of options.manage looks like leftover
        # debug output — confirm whether it should be removed.
        print options.manage
        parser.print_usage()
        errorAndExit("Error: The '--manage' option is used only in conjunction with '--plist'. DS Objects already contain this information.")

    # Exactly one identifier source must be supplied.
    if (not options.identifier and not options.identifier_from_profile) or \
    (options.identifier and options.identifier_from_profile):
        parser.print_usage()
        sys.exit(-1)

    if options.identifier:
        identifier = options.identifier
    elif options.identifier_from_profile:
        if not os.path.exists(options.identifier_from_profile):
            errorAndExit("Error reading a profile at path %s" % options.identifier_from_profile)
        identifier = getIdentifierFromProfile(options.identifier_from_profile)

    # --manage only matters for plist payloads; default to 'Always'.
    if options.plist:
        if not options.manage:
            manage = 'Always'
        else:
            manage = options.manage

    if options.output:
        output_file = options.output
    else:
        output_file = os.path.join(os.getcwd(), identifier + '.mobileconfig')

    newPayload = PayloadDict(identifier=identifier,
        removal_allowed=options.removal_allowed,
        organization=options.organization)

    if options.plist:
        # Each plist becomes its own payload; domain and ByHost-ness are
        # derived from the plist's filename.
        for plist_path in options.plist:
            if not os.path.exists(plist_path):
                errorAndExit("No plist file exists at %s" % plist_path)
            try:
                source_data = FoundationPlist.readPlist(plist_path)
            except FoundationPlist.FoundationPlistException:
                errorAndExit("Error decoding plist data in file %s" % plist_path)

            source_domain = getDomainFromPlist(plist_path)
            newPayload.addPayloadFromPlistContents(source_data,
                source_domain['name'],
                manage,
                is_byhost=source_domain['is_byhost'])
    if options.dsobject:
        mcx_data = getMCXData(options.dsobject)
        newPayload.addPayloadFromMCX(mcx_data)

    FoundationPlist.writePlist(newPayload.data, output_file)
Example #58
0
def main():
  """Main function."""
  wait_for_network()
  if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
    print "AutoDMG not at expected path in /Applications, quitting!"
    sys.exit(1)
  parser = argparse.ArgumentParser(
    description='Built a precached AutoDMG image.')
  parser.add_argument(
    '-c', '--catalog', help='Catalog name. Defaults to "prod".',
    default='prod')
  parser.add_argument(
    '-m', '--manifest', help='Manifest name. Defaults to "prod".',
    default='prod')
  parser.add_argument(
    '-o', '--output', help='Path to DMG to create.',
    default='AutoDMG_full.hfs.dmg')
  parser.add_argument(
    '--cache', help='Path to local cache to store files.'
                    ' Defaults to "/Library/AutoDMG"',
    default='/Library/AutoDMG')
  parser.add_argument(
    '-d', '--download', help='Force a redownload of all files.',
    action='store_true', default=False)
  parser.add_argument(
    '-l', '--logpath', help='Path to log files for AutoDMG.',
    default='/Library/AutoDMG/logs/')
  parser.add_argument(
    '--custom', help='Path to place custom resources. Defaults to '
                     '/Library/Managed Installs/client_resources/.',
    default='/Library/Managed Installs/client_resources/')
  parser.add_argument(
    '-s', '--source', help='Path to base OS installer.',
    default='/Applications/Install OS X El Capitan.app')
  parser.add_argument(
    '-v', '--volumename', help='Name of volume after imaging. '
                               'Defaults to "Macintosh HD."',
    default='Macintosh HD')
  parser.add_argument(
    '--loglevel', help='Set loglevel between 1 and 7. Defaults to 6.',
    choices=range(1, 8), default=6, type=int)
  parser.add_argument(
    '--dsrepo', help='Path to DeployStudio repo. ')
  parser.add_argument(
    '--noicons', help="Don't cache icons.",
    action='store_true', default=False)
  parser.add_argument(
    '-u', '--update', help='Update the profiles plist.',
    action='store_true', default=False)
  parser.add_argument(
    '--disableupdates', help='Disable updates to built image via AutoDMG',
    action='store_false', default=True)
  parser.add_argument(
    '--movefile', help="Path to move file to after building.")
  parser.add_argument(
    '--extras', help='Path to JSON file containing additions '
                     ' and exceptions lists.')
  args = parser.parse_args()

  print "Using Munki repo: %s" % MUNKI_URL
  global CACHE
  CACHE = args.cache

  if "https" in MUNKI_URL and not BASIC_AUTH:
    print >> sys.stderr, "Error: HTTPS was used but no auth provided."
    sys.exit(2)

  print time.strftime("%c")
  print "Starting run..."
  # Create the local cache directories
  dir_struct = {
    'additions': os.path.join(CACHE, 'additions'),
    'catalogs': os.path.join(CACHE, 'catalogs'),
    'downloads': os.path.join(CACHE, 'downloads'),
    'exceptions': os.path.join(CACHE, 'exceptions'),
    'manifests': os.path.join(CACHE, 'manifests'),
    'icons': os.path.join(CACHE, 'icons'),
    'logs': os.path.join(CACHE, 'logs')
  }
  path_creation = prepare_local_paths(dir_struct.values())
  if path_creation > 0:
    print "Error setting up local cache directories."
    sys.exit(-1)

  # These are necessary to populate the globals used in updatecheck
  keychain_obj = keychain.MunkiKeychain()
  manifestpath = updatecheck.getPrimaryManifest(args.manifest)
  updatecheck.getPrimaryManifestCatalogs(args.manifest)
  updatecheck.getCatalogs([args.catalog])

  installinfo = {}
  installinfo['processed_installs'] = []
  installinfo['processed_uninstalls'] = []
  installinfo['managed_updates'] = []
  installinfo['optional_installs'] = []
  installinfo['managed_installs'] = []
  installinfo['removals'] = []
  updatecheck.processManifestForKey(manifestpath, 'managed_installs',
                                    installinfo)
  # installinfo['managed_installs'] now contains a list of all managed_installs
  install_list = []
  for item in installinfo['managed_installs']:
    detail = updatecheck.getItemDetail(item['name'], [args.catalog])
    if detail:
      install_list.append(detail)

  # Prior to downloading anything, populate the lists
  additions_list = list()
  item_list = list()
  except_list = list()
  exceptions = list()
  # exceptions[] is a list of exceptions specified by the extras file
  # except_list is a list of files downloaded into the exceptions dir
  if args.extras:
    # Additions are downloaded & added to the additions_list
    # Exceptions are added to the exceptions list,
    # Downloaded exceptions are added to the except_list list.
    handle_extras(
      args.extras,
      dir_struct['exceptions'],
      dir_struct['additions'],
      args.download,
      exceptions,
      except_list,
      additions_list
    )

  # Check for managed_install items and download them
  process_managed_installs(install_list, exceptions,
                           except_list, item_list,
                           dir_struct['exceptions'],
                           dir_struct['downloads'],
                           args.download)

  # Icon handling
  if not args.noicons:
    # Get icons for Managed Updates, Optional Installs and removals
    updatecheck.processManifestForKey(manifestpath, 'managed_updates',
                                    installinfo)
    updatecheck.processManifestForKey(manifestpath, 'managed_uninstalls',
                                    installinfo)
    updatecheck.processManifestForKey(manifestpath, 'optional_installs',
                                    installinfo)
    icon_pkg_file = handle_icons(dir_struct['icons'], installinfo)
  if icon_pkg_file:
    additions_list.extend([icon_pkg_file])

  # Munki custom resources handling
  custom_pkg_file = handle_custom(args.custom)
  if custom_pkg_file:
    additions_list.extend([custom_pkg_file])

  # Clean up cache of items we don't recognize
  cleanup_local_cache(item_list, dir_struct['downloads'])
  cleanup_local_cache(except_list, dir_struct['exceptions'])

  # Build the package of exceptions, if any
  if except_list:
    pkg_output_file = os.path.join(CACHE, 'munki_cache.pkg')
    success = build_pkg(
      dir_struct['exceptions'],
      'munki_cache',
      'com.facebook.cpe.munki_exceptions',
      '/Library/Managed Installs/Cache',
      CACHE,
      'Building exceptions package'
    )
    if success:
      additions_list.extend([pkg_output_file])
    else:
      print "Failed to build exceptions package!"

  loglevel = str(args.loglevel)

  # Run any extra code or package builds
  sys.stdout.flush()
  pkg_list = autodmg_org.run_unique_code(args)
  additions_list.extend(pkg_list)

  # Now that cache is downloaded, let's add it to the AutoDMG template.
  print "Creating AutoDMG-full.adtmpl."
  templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

  plist = dict()
  plist["ApplyUpdates"] = args.disableupdates
  plist["SourcePath"] = args.source
  plist["TemplateFormat"] = "1.0"
  plist["VolumeName"] = args.volumename
  plist["AdditionalPackages"] = [
    os.path.join(
      dir_struct['downloads'], f
    ) for f in os.listdir(
      dir_struct['downloads']
    ) if (not f == '.DS_Store') and (f not in additions_list)
  ]

  if additions_list:
    plist["AdditionalPackages"].extend(additions_list)

  # Complete the AutoDMG-full.adtmpl template
  plistlib.writePlist(plist, templatepath)
  autodmg_cmd = [
    '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
  ]
  if os.getuid() == 0:
    # We are running as root
    print "Running as root."
    autodmg_cmd.append('--root')
  if args.update:
    # Update the profiles plist too
    print "Updating UpdateProfiles.plist..."
    cmd = autodmg_cmd + ['update']
    run(cmd)

  logfile = os.path.join(args.logpath, 'build.log')
  # Now kick off the AutoDMG build
  dmg_output_path = os.path.join(CACHE, args.output)
  sys.stdout.flush()
  print "Building disk image..."
  if os.path.isfile(dmg_output_path):
    os.remove(dmg_output_path)
  cmd = autodmg_cmd + [
    '-L', loglevel,
    '-l', logfile,
    'build', templatepath,
    '--download-updates',
    '-o', dmg_output_path]
  print "Full command: %s" % cmd
  run(cmd)
  if not os.path.isfile(dmg_output_path):
    print >> sys.stderr, "Failed to create disk image!"
    sys.exit(1)

  # Check the Deploystudio masters to see if this image already exists
  sys.stdout.flush()
  if args.dsrepo:
    populate_ds_repo(dmg_output_path, args.dsrepo)

  if args.movefile:
    move_file(dmg_output_path, args.movefile)

  print "Ending run."
  print time.strftime("%c")