Example 1
def wait_for_network():
    """Wait until network access is up."""
    # Wait up to 180 seconds for scutil dynamic store to register DNS
    cmd = ['/usr/sbin/scutil', '-w', 'State:/Network/Global/DNS', '-t', '180']
    if run(cmd) != 0:
        print "Network did not come up after 3 minutes. Exiting!"
        sys.exit(1)
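
Each excerpt on this page calls a module-level run() helper that is not part of the quoted code, and relies on imports (sys, os, time, argparse, plistlib, plus project modules such as updatecheck, keychain and autodmg_org) defined elsewhere in the file. A minimal sketch of the assumed helper, treating run() as a thin wrapper that executes a command list and returns its exit code:

# Assumed context, not part of the quoted source: a thin wrapper around subprocess
# matching the "run(cmd) != 0 means failure" convention used in the excerpts.
import subprocess

def run(cmd):
    """Execute a command list and return its exit code."""
    return subprocess.call(cmd)
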
Example 2
def wait_for_network():
  """Wait until network access is up."""
  # Wait up to 180 seconds for scutil dynamic store to register DNS
  cmd = [
    '/usr/sbin/scutil',
    '-w', 'State:/Network/Global/DNS',
    '-t', '180'
  ]
  if run(cmd) != 0:
    print "Network did not come up after 3 minutes. Exiting!"
    sys.exit(1)
Example 3
def build_bare_dmg(source, cache, logpath, loglevel, repo_path):
  """Build a bare OS DMG for Donation/bare usage."""
  dmg_output_path = os.path.join(cache, 'Bare.hfs.dmg')
  if os.path.isfile(dmg_output_path):
    print "Donation image already found, not building.\n"
    return
  print "Creating AutoDMG-donation.adtmpl."
  templatepath = os.path.join(cache, 'AutoDMG-bare.adtmpl')

  plist = dict()
  plist["ApplyUpdates"] = True
  plist["SourcePath"] = source
  plist["TemplateFormat"] = "1.0"
  plist["VolumeName"] = "Macintosh HD"

  # Complete the AutoDMG-bare.adtmpl template
  plistlib.writePlist(plist, templatepath)
  autodmg_cmd = [
    '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
  ]
  if os.getuid() == 0:
    # We are running as root
    print "Running as root."
    autodmg_cmd.append('--root')

  logfile = os.path.join(logpath, 'bare.log')

  # Now kick off the AutoDMG build
  print "Building bare image..."
  if os.path.isfile(dmg_output_path):
    os.remove(dmg_output_path)
  cmd = autodmg_cmd + [
    '-L', loglevel,
    '-l', logfile,
    'build', templatepath,
    '--download-updates',
    '-o', dmg_output_path]
  run(cmd)
  print "Moving bare image to DS Repo."
  populate_ds_repo(dmg_output_path, repo_path)
Example 4
def build_bare_dmg(source, cache, logpath, loglevel, repo_path):
    """Build a bare OS DMG for Donation/bare usage."""
    dmg_output_path = os.path.join(cache, 'Bare.hfs.dmg')
    if os.path.isfile(dmg_output_path):
        print "Donation image already found, not building.\n"
        return
    print "Creating AutoDMG-donation.adtmpl."
    templatepath = os.path.join(cache, 'AutoDMG-bare.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = True
    plist["SourcePath"] = source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = "Macintosh HD"

    # Complete the AutoDMG-bare.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = ['/Applications/AutoDMG.app/Contents/MacOS/AutoDMG']
    if os.getuid() == 0:
        # We are running as root
        print "Running as root."
        autodmg_cmd.append('--root')

    logfile = os.path.join(logpath, 'bare.log')

    # Now kick off the AutoDMG build
    print "Building bare image..."
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel, '-l', logfile, 'build', templatepath,
        '--download-updates', '-o', dmg_output_path
    ]
    run(cmd)
    print "Moving bare image to DS Repo."
    populate_ds_repo(dmg_output_path, repo_path)
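
Both build_bare_dmg variants write the template with plistlib.writePlist, a legacy API that was removed from plistlib in Python 3.9. If the same template had to be written under Python 3, the equivalent call is plistlib.dump on a binary file object; a small sketch reusing the plist and templatepath names from the function above:

# Python 3 equivalent of plistlib.writePlist(plist, templatepath).
import plistlib

with open(templatepath, 'wb') as f:
    plistlib.dump(plist, f, fmt=plistlib.FMT_XML)
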
Example 5
def main():
    """Main function."""
    wait_for_network()
    if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
        print "AutoDMG not at expected path in /Applications, quitting!"
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Build a precached AutoDMG image.')
    parser.add_argument('-c',
                        '--catalog',
                        help='Catalog name. Defaults to "prod".',
                        default='prod')
    parser.add_argument('-m',
                        '--manifest',
                        help='Manifest name. Defaults to "prod".',
                        default='prod')
    parser.add_argument('-o',
                        '--output',
                        help='Path to DMG to create.',
                        default='AutoDMG_full.hfs.dmg')
    parser.add_argument('--cache',
                        help='Path to local cache to store files.'
                        ' Defaults to "/Library/AutoDMG"',
                        default='/Library/AutoDMG')
    parser.add_argument('-d',
                        '--download',
                        help='Force a redownload of all files.',
                        action='store_true',
                        default=False)
    parser.add_argument('-l',
                        '--logpath',
                        help='Path to log files for AutoDMG.',
                        default='/Library/AutoDMG/logs/')
    parser.add_argument('--custom',
                        help='Path to place custom resources. Defaults to '
                        '/Library/Managed Installs/client_resources/.',
                        default='/Library/Managed Installs/client_resources/')
    parser.add_argument('-s',
                        '--source',
                        help='Path to base OS installer.',
                        default='/Applications/Install OS X El Capitan.app')
    parser.add_argument('-v',
                        '--volumename',
                        help='Name of volume after imaging. '
                        'Defaults to "Macintosh HD."',
                        default='Macintosh HD')
    parser.add_argument('--loglevel',
                        help='Set loglevel between 1 and 7. Defaults to 6.',
                        choices=range(1, 8),
                        default=6,
                        type=int)
    parser.add_argument('--dsrepo', help='Path to DeployStudio repo. ')
    parser.add_argument('--noicons',
                        help="Don't cache icons.",
                        action='store_true',
                        default=False)
    parser.add_argument('-u',
                        '--update',
                        help='Update the profiles plist.',
                        action='store_true',
                        default=False)
    parser.add_argument('--disableupdates',
                        help='Disable updates to built image via AutoDMG',
                        action='store_false',
                        default=True)
    parser.add_argument('--movefile',
                        help="Path to move file to after building.")
    parser.add_argument('--extras',
                        help='Path to JSON file containing additions '
                        ' and exceptions lists.')
    args = parser.parse_args()

    print "Using Munki repo: %s" % MUNKI_URL
    global CACHE
    CACHE = args.cache

    if "https" in MUNKI_URL and not BASIC_AUTH:
        print >> sys.stderr, "Error: HTTPS was used but no auth provided."
        sys.exit(2)

    print time.strftime("%c")
    print "Starting run..."
    # Create the local cache directories
    dir_struct = {
        'additions': os.path.join(CACHE, 'additions'),
        'catalogs': os.path.join(CACHE, 'catalogs'),
        'downloads': os.path.join(CACHE, 'downloads'),
        'exceptions': os.path.join(CACHE, 'exceptions'),
        'manifests': os.path.join(CACHE, 'manifests'),
        'icons': os.path.join(CACHE, 'icons'),
        'logs': os.path.join(CACHE, 'logs')
    }
    path_creation = prepare_local_paths(dir_struct.values())
    if path_creation > 0:
        print "Error setting up local cache directories."
        sys.exit(-1)

    # These are necessary to populate the globals used in updatecheck
    keychain_obj = keychain.MunkiKeychain()
    manifestpath = updatecheck.getPrimaryManifest(args.manifest)
    updatecheck.getPrimaryManifestCatalogs(args.manifest)
    updatecheck.getCatalogs([args.catalog])

    installinfo = {}
    installinfo['processed_installs'] = []
    installinfo['processed_uninstalls'] = []
    installinfo['managed_updates'] = []
    installinfo['optional_installs'] = []
    installinfo['managed_installs'] = []
    installinfo['removals'] = []
    updatecheck.processManifestForKey(manifestpath, 'managed_installs',
                                      installinfo)
    # installinfo['managed_installs'] now contains a list of all managed_installs
    install_list = []
    for item in installinfo['managed_installs']:
        detail = updatecheck.getItemDetail(item['name'], [args.catalog])
        if detail:
            install_list.append(detail)

    # Prior to downloading anything, populate the lists
    additions_list = list()
    item_list = list()
    except_list = list()
    exceptions = list()
    # exceptions[] is a list of exceptions specified by the extras file
    # except_list is a list of files downloaded into the exceptions dir
    if args.extras:
        # Additions are downloaded & added to the additions_list
        # Exceptions are added to the exceptions list,
        # Downloaded exceptions are added to the except_list list.
        handle_extras(args.extras, dir_struct['exceptions'],
                      dir_struct['additions'], args.download, exceptions,
                      except_list, additions_list)

    # Check for managed_install items and download them
    process_managed_installs(install_list, exceptions, except_list, item_list,
                             dir_struct['exceptions'], dir_struct['downloads'],
                             args.download)

    # Icon handling
    icon_pkg_file = None  # ensure this is defined even when --noicons is passed
    if not args.noicons:
        # Get icons for Managed Updates, Optional Installs and removals
        updatecheck.processManifestForKey(manifestpath, 'managed_updates',
                                          installinfo)
        updatecheck.processManifestForKey(manifestpath, 'managed_uninstalls',
                                          installinfo)
        updatecheck.processManifestForKey(manifestpath, 'optional_installs',
                                          installinfo)
        icon_pkg_file = handle_icons(dir_struct['icons'], installinfo)
    if icon_pkg_file:
        additions_list.extend([icon_pkg_file])

    # Munki custom resources handling
    custom_pkg_file = handle_custom(args.custom)
    if custom_pkg_file:
        additions_list.extend([custom_pkg_file])

    # Clean up cache of items we don't recognize
    cleanup_local_cache(item_list, dir_struct['downloads'])
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    # Build the package of exceptions, if any
    if except_list:
        pkg_output_file = os.path.join(CACHE, 'munki_cache.pkg')
        success = build_pkg(dir_struct['exceptions'], 'munki_cache',
                            'com.facebook.cpe.munki_exceptions',
                            '/Library/Managed Installs/Cache', CACHE,
                            'Building exceptions package')
        if success:
            additions_list.extend([pkg_output_file])
        else:
            print "Failed to build exceptions package!"

    loglevel = str(args.loglevel)

    # Run any extra code or package builds
    sys.stdout.flush()
    pkg_list = autodmg_org.run_unique_code(args)
    additions_list.extend(pkg_list)

    # Now that cache is downloaded, let's add it to the AutoDMG template.
    print "Creating AutoDMG-full.adtmpl."
    templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = args.disableupdates
    plist["SourcePath"] = args.source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = args.volumename
    plist["AdditionalPackages"] = [
        os.path.join(dir_struct['downloads'], f)
        for f in os.listdir(dir_struct['downloads'])
        if (not f == '.DS_Store') and (f not in additions_list)
    ]

    if additions_list:
        plist["AdditionalPackages"].extend(additions_list)

    # Complete the AutoDMG-full.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = ['/Applications/AutoDMG.app/Contents/MacOS/AutoDMG']
    if os.getuid() == 0:
        # We are running as root
        print "Running as root."
        autodmg_cmd.append('--root')
    if args.update:
        # Update the profiles plist too
        print "Updating UpdateProfiles.plist..."
        cmd = autodmg_cmd + ['update']
        run(cmd)

    logfile = os.path.join(args.logpath, 'build.log')
    # Now kick off the AutoDMG build
    dmg_output_path = os.path.join(CACHE, args.output)
    sys.stdout.flush()
    print "Building disk image..."
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel, '-l', logfile, 'build', templatepath,
        '--download-updates', '-o', dmg_output_path
    ]
    print "Full command: %s" % cmd
    run(cmd)
    if not os.path.isfile(dmg_output_path):
        print >> sys.stderr, "Failed to create disk image!"
        sys.exit(1)

    # Check the Deploystudio masters to see if this image already exists
    sys.stdout.flush()
    if args.dsrepo:
        populate_ds_repo(dmg_output_path, args.dsrepo)

    if args.movefile:
        move_file(dmg_output_path, args.movefile)

    print "Ending run."
    print time.strftime("%c")
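
prepare_local_paths is not shown in these excerpts; from its call site it receives the list of cache directories and returns a count greater than zero when any of them could not be created. A hypothetical reconstruction under that assumption (the real implementation may differ):

# Hypothetical reconstruction of prepare_local_paths, inferred only from its call site.
import os

def prepare_local_paths(path_list):
    """Create each directory if missing; return how many could not be created."""
    errors = 0
    for path in path_list:
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except OSError:
                errors += 1
    return errors
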
Example 6
def main():
  """Main function."""
  wait_for_network()
  if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
    print "AutoDMG not at expected path in /Applications, quitting!"
    sys.exit(1)
  parser = argparse.ArgumentParser(
    description='Build a precached AutoDMG image.')
  parser.add_argument(
    '-c', '--catalog', help='Catalog name. Defaults to "prod".',
    default='prod')
  parser.add_argument(
    '-m', '--manifest', help='Manifest name. Defaults to "prod".',
    default='prod')
  parser.add_argument(
    '-o', '--output', help='Path to DMG to create.',
    default='AutoDMG_full.hfs.dmg')
  parser.add_argument(
    '--cache', help='Path to local cache to store files.'
                    ' Defaults to "/Library/AutoDMG"',
    default='/Library/AutoDMG')
  parser.add_argument(
    '-d', '--download', help='Force a redownload of all files.',
    action='store_true', default=False)
  parser.add_argument(
    '-l', '--logpath', help='Path to log files for AutoDMG.',
    default='/Library/AutoDMG/logs/')
  parser.add_argument(
    '--custom', help='Path to place custom resources. Defaults to '
                     '/Library/Managed Installs/client_resources/.',
    default='/Library/Managed Installs/client_resources/')
  parser.add_argument(
    '-s', '--source', help='Path to base OS installer.',
    default='/Applications/Install OS X El Capitan.app')
  parser.add_argument(
    '-v', '--volumename', help='Name of volume after imaging. '
                               'Defaults to "Macintosh HD."',
    default='Macintosh HD')
  parser.add_argument(
    '--loglevel', help='Set loglevel between 1 and 7. Defaults to 6.',
    choices=range(1, 8), default=6, type=int)
  parser.add_argument(
    '--dsrepo', help='Path to DeployStudio repo. ')
  parser.add_argument(
    '--noicons', help="Don't cache icons.",
    action='store_true', default=False)
  parser.add_argument(
    '-u', '--update', help='Update the profiles plist.',
    action='store_true', default=False)
  parser.add_argument(
    '--disableupdates', help='Disable updates to built image via AutoDMG',
    action='store_false', default=True)
  parser.add_argument(
    '--movefile', help="Path to move file to after building.")
  parser.add_argument(
    '--extras', help='Path to JSON file containing additions '
                     ' and exceptions lists.')
  args = parser.parse_args()

  print "Using Munki repo: %s" % MUNKI_URL
  global CACHE
  CACHE = args.cache

  if "https" in MUNKI_URL and not BASIC_AUTH:
    print >> sys.stderr, "Error: HTTPS was used but no auth provided."
    sys.exit(2)

  print time.strftime("%c")
  print "Starting run..."
  # Create the local cache directories
  dir_struct = {
    'additions': os.path.join(CACHE, 'additions'),
    'catalogs': os.path.join(CACHE, 'catalogs'),
    'downloads': os.path.join(CACHE, 'downloads'),
    'exceptions': os.path.join(CACHE, 'exceptions'),
    'manifests': os.path.join(CACHE, 'manifests'),
    'icons': os.path.join(CACHE, 'icons'),
    'logs': os.path.join(CACHE, 'logs')
  }
  path_creation = prepare_local_paths(dir_struct.values())
  if path_creation > 0:
    print "Error setting up local cache directories."
    sys.exit(-1)

  # These are necessary to populate the globals used in updatecheck
  keychain_obj = keychain.MunkiKeychain()
  manifestpath = updatecheck.getPrimaryManifest(args.manifest)
  updatecheck.getPrimaryManifestCatalogs(args.manifest)
  updatecheck.getCatalogs([args.catalog])

  installinfo = {}
  installinfo['processed_installs'] = []
  installinfo['processed_uninstalls'] = []
  installinfo['managed_updates'] = []
  installinfo['optional_installs'] = []
  installinfo['managed_installs'] = []
  installinfo['removals'] = []
  updatecheck.processManifestForKey(manifestpath, 'managed_installs',
                                    installinfo)
  # installinfo['managed_installs'] now contains a list of all managed_installs
  install_list = []
  for item in installinfo['managed_installs']:
    detail = updatecheck.getItemDetail(item['name'], [args.catalog])
    if detail:
      install_list.append(detail)

  # Prior to downloading anything, populate the lists
  additions_list = list()
  item_list = list()
  except_list = list()
  exceptions = list()
  # exceptions[] is a list of exceptions specified by the extras file
  # except_list is a list of files downloaded into the exceptions dir
  if args.extras:
    # Additions are downloaded & added to the additions_list
    # Exceptions are added to the exceptions list,
    # Downloaded exceptions are added to the except_list list.
    handle_extras(
      args.extras,
      dir_struct['exceptions'],
      dir_struct['additions'],
      args.download,
      exceptions,
      except_list,
      additions_list
    )

  # Check for managed_install items and download them
  process_managed_installs(install_list, exceptions,
                           except_list, item_list,
                           dir_struct['exceptions'],
                           dir_struct['downloads'],
                           args.download)

  # Icon handling
  icon_pkg_file = None  # ensure this is defined even when --noicons is passed
  if not args.noicons:
    # Get icons for Managed Updates, Optional Installs and removals
    updatecheck.processManifestForKey(manifestpath, 'managed_updates',
                                      installinfo)
    updatecheck.processManifestForKey(manifestpath, 'managed_uninstalls',
                                      installinfo)
    updatecheck.processManifestForKey(manifestpath, 'optional_installs',
                                      installinfo)
    icon_pkg_file = handle_icons(dir_struct['icons'], installinfo)
  if icon_pkg_file:
    additions_list.extend([icon_pkg_file])

  # Munki custom resources handling
  custom_pkg_file = handle_custom(args.custom)
  if custom_pkg_file:
    additions_list.extend([custom_pkg_file])

  # Clean up cache of items we don't recognize
  cleanup_local_cache(item_list, dir_struct['downloads'])
  cleanup_local_cache(except_list, dir_struct['exceptions'])

  # Build the package of exceptions, if any
  if except_list:
    pkg_output_file = os.path.join(CACHE, 'munki_cache.pkg')
    success = build_pkg(
      dir_struct['exceptions'],
      'munki_cache',
      'com.facebook.cpe.munki_exceptions',
      '/Library/Managed Installs/Cache',
      CACHE,
      'Building exceptions package'
    )
    if success:
      additions_list.extend([pkg_output_file])
    else:
      print "Failed to build exceptions package!"

  loglevel = str(args.loglevel)

  # Run any extra code or package builds
  sys.stdout.flush()
  pkg_list = autodmg_org.run_unique_code(args)
  additions_list.extend(pkg_list)

  # Now that cache is downloaded, let's add it to the AutoDMG template.
  print "Creating AutoDMG-full.adtmpl."
  templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

  plist = dict()
  plist["ApplyUpdates"] = args.disableupdates
  plist["SourcePath"] = args.source
  plist["TemplateFormat"] = "1.0"
  plist["VolumeName"] = args.volumename
  plist["AdditionalPackages"] = [
    os.path.join(
      dir_struct['downloads'], f
    ) for f in os.listdir(
      dir_struct['downloads']
    ) if (not f == '.DS_Store') and (f not in additions_list)
  ]

  if additions_list:
    plist["AdditionalPackages"].extend(additions_list)

  # Complete the AutoDMG-full.adtmpl template
  plistlib.writePlist(plist, templatepath)
  autodmg_cmd = [
    '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
  ]
  if os.getuid() == 0:
    # We are running as root
    print "Running as root."
    autodmg_cmd.append('--root')
  if args.update:
    # Update the profiles plist too
    print "Updating UpdateProfiles.plist..."
    cmd = autodmg_cmd + ['update']
    run(cmd)

  logfile = os.path.join(args.logpath, 'build.log')
  # Now kick off the AutoDMG build
  dmg_output_path = os.path.join(CACHE, args.output)
  sys.stdout.flush()
  print "Building disk image..."
  if os.path.isfile(dmg_output_path):
    os.remove(dmg_output_path)
  cmd = autodmg_cmd + [
    '-L', loglevel,
    '-l', logfile,
    'build', templatepath,
    '--download-updates',
    '-o', dmg_output_path]
  print "Full command: %s" % cmd
  run(cmd)
  if not os.path.isfile(dmg_output_path):
    print >> sys.stderr, "Failed to create disk image!"
    sys.exit(1)

  # Check the Deploystudio masters to see if this image already exists
  sys.stdout.flush()
  if args.dsrepo:
    populate_ds_repo(dmg_output_path, args.dsrepo)

  if args.movefile:
    move_file(dmg_output_path, args.movefile)

  print "Ending run."
  print time.strftime("%c")
Example 7
def main():
    """Main function."""
    wait_for_network()
    if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
        print("AutoDMG not at expected path in /Applications, quitting!")
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Build a precached AutoDMG image.')
    parser.add_argument(
        '-m', '--manifest', help='Manifest name. Defaults to "prod".',
        default='prod')
    parser.add_argument(
        '-o', '--output', help='Path to DMG to create.',
        default='AutoDMG_full.hfs.dmg')
    parser.add_argument(
        '--cache', help=(
            'Path to local cache to store files. '
            'Defaults to "/Library/AutoDMG"'),
        default='/Library/AutoDMG')
    parser.add_argument(
        '-d', '--download', help='Force a redownload of all files.',
        action='store_true', default=False)
    parser.add_argument(
        '-l', '--logpath', help='Path to log files for AutoDMG.',
        default='/Library/AutoDMG/logs/')
    parser.add_argument(
        '-s', '--source', help='Path to base OS installer.',
        default='/Applications/Install OS X El Capitan.app')
    parser.add_argument(
        '-n', '--volumename', help=(
            'Name of volume after imaging. '
            'Defaults to "Macintosh HD."'),
        default='Macintosh HD')
    parser.add_argument(
        '-S', '--volumesize', help=(
            'Size of volume after imaging. '
            'Defaults to 120'),
        default=120)
    parser.add_argument(
        '--loglevel', help='Set loglevel between 1 and 7. Defaults to 6.',
        choices=range(1, 8), type=int, default=6)
    parser.add_argument(
        '--dsrepo', help='Path to DeployStudio repo. ')
    parser.add_argument(
        '--noicons', help="Don't cache icons.",
        action='store_true', default=False)
    parser.add_argument(
        '-u', '--update', help='Update the profiles plist.',
        action='store_true', default=False)
    parser.add_argument(
        '--extras', help=(
            'Path to JSON file containing additions '
            ' and exceptions lists.')
    )
    args = parser.parse_args()

    print("Using Munki repo: %s" % MUNKI_URL)
    global CACHE
    CACHE = args.cache

    print(time.strftime("%c"))
    print("Starting run...")
    # Create the local cache directories
    dir_struct = {
        'additions': os.path.join(CACHE, 'additions'),
        'catalogs': os.path.join(CACHE, 'catalogs'),
        'downloads': os.path.join(CACHE, 'downloads'),
        'exceptions': os.path.join(CACHE, 'exceptions'),
        'exceptions_pkgs': os.path.join(CACHE, 'exceptions_pkgs'),
        'manifests': os.path.join(CACHE, 'manifests'),
        'icons': os.path.join(CACHE, 'icons'),
        'logs': os.path.join(CACHE, 'logs'),
        'client_resources': os.path.join(CACHE, 'client_resources'),
    }
    path_creation = prepare_local_paths(dir_struct.values())
    if path_creation > 0:
        print("Error setting up local cache directories.")
        sys.exit(-1)

    # Populate the list of installs based on the manifest
    install_list = gather_install_list(args.manifest)

    # Prior to downloading anything, populate the other lists
    additions_list = list()
    item_list = list()
    except_list = list()
    exceptions = list()
    # exceptions[] is a list of exceptions specified by the extras file
    # except_list[] is a list of files downloaded into the exceptions dir
    if args.extras:
        # Additions are downloaded & added to the additions_list
        # Exceptions are added to the exceptions list,
        # Downloaded exceptions are added to the except_list list.
        handle_extras(
            args.extras,
            dir_struct['exceptions'],
            dir_struct['additions'],
            args.download,
            exceptions,
            except_list,
            additions_list
        )

    # Check for managed_install items and download them
    process_managed_installs(
        install_list,
        exceptions,
        except_list,
        item_list,
        dir_struct['exceptions'],
        dir_struct['downloads'],
        args.download
    )

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    # Icon handling
    icon_pkg_file = None  # ensure this is defined even when --noicons is passed
    if not args.noicons:
        # Download all icons from the catalogs used by the manifest
        catalog_item_list = []
        for catalog in os.listdir(dir_struct['catalogs']):
            catalog_item_list += plistlib.readPlist(
                os.path.join(dir_struct['catalogs'], catalog)
            )
        icon_pkg_file = handle_icons(catalog_item_list)
    if icon_pkg_file:
        additions_list.extend([icon_pkg_file])

    # Munki custom resources handling
    custom_pkg_file = handle_custom()
    if custom_pkg_file:
        additions_list.extend([custom_pkg_file])

    # Build each exception into its own package
    sys.stdout.flush()
    exceptions_pkg_list = build_exceptions(CACHE)
    additions_list.extend(exceptions_pkg_list)

    loglevel = str(args.loglevel)

    # Run any extra code or package builds
    sys.stdout.flush()
    pkg_list = autodmg_org.run_unique_code(args)
    additions_list.extend(pkg_list)

    # Now that cache is downloaded, let's add it to the AutoDMG template.
    print("Creating AutoDMG-full.adtmpl.")
    templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = True
    plist["SourcePath"] = args.source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = args.volumename
    plist["VolumeSize"] = args.volumesize
    plist["AdditionalPackages"] = [
        os.path.join(
            dir_struct['downloads'], f
        ) for f in os.listdir(
            dir_struct['downloads']
        ) if (not f == '.DS_Store') and (f not in additions_list)
    ]

    if additions_list:
        plist["AdditionalPackages"].extend(additions_list)

    # Complete the AutoDMG-full.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = [
        '/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'
    ]
    if os.getuid() == 0:
        # We are running as root
        print("Running as root.")
        autodmg_cmd.append('--root')
    if args.update:
        # Update the profiles plist too
        print("Updating UpdateProfiles.plist...")
        cmd = autodmg_cmd + ['update']
        run(cmd)

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    logfile = os.path.join(args.logpath, 'build.log')
    # Now kick off the AutoDMG build
    dmg_output_path = os.path.join(CACHE, args.output)
    sys.stdout.flush()
    print("Building disk image...")
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel,
        '-l', logfile,
        'build', templatepath,
        '--download-updates',
        '-o', dmg_output_path]
    print("Full command: %s" % cmd)
    run(cmd)
    if not os.path.isfile(dmg_output_path):
        print("Failed to create disk image!", file=sys.stderr)
        sys.exit(1)

    sys.stdout.flush()
    if args.dsrepo:
        # Check the Deploystudio masters to see if this image already exists
        populate_ds_repo(dmg_output_path, args.dsrepo)

    print("Ending run.")
    print(time.strftime("%c"))
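
This variant reads each cached catalog with plistlib.readPlist, another API removed in Python 3.9. Under current Python the same read is done with plistlib.load on a binary file object; a sketch of that loop, assuming the same dir_struct layout:

# Python 3 equivalent of the plistlib.readPlist catalog loop above.
import os
import plistlib

catalog_item_list = []
for catalog in os.listdir(dir_struct['catalogs']):
    with open(os.path.join(dir_struct['catalogs'], catalog), 'rb') as f:
        catalog_item_list += plistlib.load(f)
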
Example 8
def main():
    """Main function."""
    wait_for_network()
    if not os.path.exists('/Applications/AutoDMG.app/Contents/MacOS/AutoDMG'):
        print("AutoDMG not at expected path in /Applications, quitting!")
        sys.exit(1)
    parser = argparse.ArgumentParser(
        description='Build a precached AutoDMG image.')
    parser.add_argument('-m',
                        '--manifest',
                        help='Manifest name. Defaults to "prod".',
                        default='prod')
    parser.add_argument('-o',
                        '--output',
                        help='Path to DMG to create.',
                        default='AutoDMG_full.hfs.dmg')
    parser.add_argument('--cache',
                        help=('Path to local cache to store files. '
                              'Defaults to "/Library/AutoDMG"'),
                        default='/Library/AutoDMG')
    parser.add_argument('-d',
                        '--download',
                        help='Force a redownload of all files.',
                        action='store_true',
                        default=False)
    parser.add_argument('-l',
                        '--logpath',
                        help='Path to log files for AutoDMG.',
                        default='/Library/AutoDMG/logs/')
    parser.add_argument('-s',
                        '--source',
                        help='Path to base OS installer.',
                        default='/Applications/Install OS X El Capitan.app')
    parser.add_argument('-n',
                        '--volumename',
                        help=('Name of volume after imaging. '
                              'Defaults to "Macintosh HD."'),
                        default='Macintosh HD')
    parser.add_argument('-S',
                        '--volumesize',
                        help=('Size of volume after imaging. '
                              'Defaults to 120'),
                        default=120)
    parser.add_argument('--loglevel',
                        help='Set loglevel between 1 and 7. Defaults to 6.',
                        choices=range(1, 8),
                        type=int,
                        default=6)
    parser.add_argument('--dsrepo', help='Path to DeployStudio repo. ')
    parser.add_argument('--noicons',
                        help="Don't cache icons.",
                        action='store_true',
                        default=False)
    parser.add_argument('-u',
                        '--update',
                        help='Update the profiles plist.',
                        action='store_true',
                        default=False)
    parser.add_argument('--extras',
                        help=('Path to JSON file containing additions '
                              ' and exceptions lists.'))
    args = parser.parse_args()

    print("Using Munki repo: %s" % MUNKI_URL)
    global CACHE
    CACHE = args.cache

    print(time.strftime("%c"))
    print("Starting run...")
    # Create the local cache directories
    dir_struct = {
        'additions': os.path.join(CACHE, 'additions'),
        'catalogs': os.path.join(CACHE, 'catalogs'),
        'downloads': os.path.join(CACHE, 'downloads'),
        'exceptions': os.path.join(CACHE, 'exceptions'),
        'exceptions_pkgs': os.path.join(CACHE, 'exceptions_pkgs'),
        'manifests': os.path.join(CACHE, 'manifests'),
        'icons': os.path.join(CACHE, 'icons'),
        'logs': os.path.join(CACHE, 'logs'),
        'client_resources': os.path.join(CACHE, 'client_resources'),
    }
    path_creation = prepare_local_paths(dir_struct.values())
    if path_creation > 0:
        print("Error setting up local cache directories.")
        sys.exit(-1)

    # Populate the list of installs based on the manifest
    install_list = gather_install_list(args.manifest)

    # Prior to downloading anything, populate the other lists
    additions_list = list()
    item_list = list()
    except_list = list()
    exceptions = list()
    # exceptions[] is a list of exceptions specified by the extras file
    # except_list[] is a list of files downloaded into the exceptions dir
    if args.extras:
        # Additions are downloaded & added to the additions_list
        # Exceptions are added to the exceptions list,
        # Downloaded exceptions are added to the except_list list.
        handle_extras(args.extras, dir_struct['exceptions'],
                      dir_struct['additions'], args.download, exceptions,
                      except_list, additions_list)

    # Check for managed_install items and download them
    process_managed_installs(install_list, exceptions, except_list, item_list,
                             dir_struct['exceptions'], dir_struct['downloads'],
                             args.download)

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    # Icon handling
    icon_pkg_file = None  # ensure this is defined even when --noicons is passed
    if not args.noicons:
        # Download all icons from the catalogs used by the manifest
        catalog_item_list = []
        for catalog in os.listdir(dir_struct['catalogs']):
            catalog_item_list += plistlib.readPlist(
                os.path.join(dir_struct['catalogs'], catalog))
        icon_pkg_file = handle_icons(catalog_item_list)
    if icon_pkg_file:
        additions_list.extend([icon_pkg_file])

    # Munki custom resources handling
    custom_pkg_file = handle_custom()
    if custom_pkg_file:
        additions_list.extend([custom_pkg_file])

    # Build each exception into its own package
    sys.stdout.flush()
    exceptions_pkg_list = build_exceptions(CACHE)
    additions_list.extend(exceptions_pkg_list)

    loglevel = str(args.loglevel)

    # Run any extra code or package builds
    sys.stdout.flush()
    pkg_list = autodmg_org.run_unique_code(args)
    additions_list.extend(pkg_list)

    # Now that cache is downloaded, let's add it to the AutoDMG template.
    print("Creating AutoDMG-full.adtmpl.")
    templatepath = os.path.join(CACHE, 'AutoDMG-full.adtmpl')

    plist = dict()
    plist["ApplyUpdates"] = True
    plist["SourcePath"] = args.source
    plist["TemplateFormat"] = "1.0"
    plist["VolumeName"] = args.volumename
    plist["VolumeSize"] = args.volumesize
    plist["AdditionalPackages"] = [
        os.path.join(dir_struct['downloads'], f)
        for f in os.listdir(dir_struct['downloads'])
        if (not f == '.DS_Store') and (f not in additions_list)
    ]

    if additions_list:
        plist["AdditionalPackages"].extend(additions_list)

    # Complete the AutoDMG-full.adtmpl template
    plistlib.writePlist(plist, templatepath)
    autodmg_cmd = ['/Applications/AutoDMG.app/Contents/MacOS/AutoDMG']
    if os.getuid() == 0:
        # We are running as root
        print("Running as root.")
        autodmg_cmd.append('--root')
    if args.update:
        # Update the profiles plist too
        print("Updating UpdateProfiles.plist...")
        cmd = autodmg_cmd + ['update']
        run(cmd)

    # Clean up cache of items we don't recognize
    print("Cleaning up downloads folder...")
    cleanup_local_cache(item_list, dir_struct['downloads'])
    print("Cleaning up exceptions folder...")
    cleanup_local_cache(except_list, dir_struct['exceptions'])

    logfile = os.path.join(args.logpath, 'build.log')
    # Now kick off the AutoDMG build
    dmg_output_path = os.path.join(CACHE, args.output)
    sys.stdout.flush()
    print("Building disk image...")
    if os.path.isfile(dmg_output_path):
        os.remove(dmg_output_path)
    cmd = autodmg_cmd + [
        '-L', loglevel, '-l', logfile, 'build', templatepath,
        '--download-updates', '-o', dmg_output_path
    ]
    print("Full command: %s" % cmd)
    run(cmd)
    if not os.path.isfile(dmg_output_path):
        print("Failed to create disk image!", file=sys.stderr)
        sys.exit(1)

    sys.stdout.flush()
    if args.dsrepo:
        # Check the Deploystudio masters to see if this image already exists
        populate_ds_repo(dmg_output_path, args.dsrepo)

    print("Ending run.")
    print(time.strftime("%c"))
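
cleanup_local_cache is likewise only visible through its call sites: it is handed the list of recognized filenames plus a cache directory, and the surrounding comments describe it as removing anything it does not recognize. A hypothetical reconstruction on that basis (the real helper may log differently or handle subdirectories another way):

# Hypothetical reconstruction of cleanup_local_cache, inferred from its call sites.
import os

def cleanup_local_cache(item_list, cache_dir):
    """Remove files in cache_dir that are not in the recognized item list."""
    for filename in os.listdir(cache_dir):
        if filename not in item_list:
            print("Removing unrecognized cached item: %s" % filename)
            os.remove(os.path.join(cache_dir, filename))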