def create_commit(imported_item):
    '''Creates a new feature branch, commits the changes,
    switches back to master'''
    # print "Changing location to %s" % autopkglib.get_pref('MUNKI_REPO')
    os.chdir(autopkglib.get_pref('MUNKI_REPO'))
    # Now, we need to create a feature branch
    print "Creating feature branch."
    branch = '%s-%s' % (str(imported_item['name']),
                        str(imported_item["version"]))
    print change_feature_branch(branch)
    # Now add all items to git staging
    print "Adding items..."
    gitaddcmd = ['add', '--all']
    gitaddcmd.append(autopkglib.get_pref("MUNKI_REPO"))
    print git_run(gitaddcmd)
    # Create the commit
    print "Creating commit..."
    gitcommitcmd = ['commit', '-m']
    message = "Updating %s to version %s" % (str(imported_item['name']),
                                             str(imported_item["version"]))
    gitcommitcmd.append(message)
    print git_run(gitcommitcmd)
    # Switch back to master
    branch = 'master'
    print change_feature_branch(branch)
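# create_commit relies on the git_run and change_feature_branch helpers,
# which are not shown in this snippet. A minimal sketch of what they might
# look like, using only the standard library (hypothetical; the real
# implementations may differ):
import subprocess

def git_run(args):
    '''Runs git with the given argument list and returns its output.'''
    # Assumes a 'git' binary is available on PATH.
    return subprocess.check_output(['git'] + args)

def change_feature_branch(branch):
    '''Switches to the given branch, creating it first if necessary.'''
    try:
        return git_run(['checkout', branch])
    except subprocess.CalledProcessError:
        return git_run(['checkout', '-b', branch])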
def git_cmd(self):
    """Returns a path to a git binary, priority in the order below.
    Returns None if none found.
    1. app pref 'GIT_PATH'
    2. a 'git' binary that can be found in the PATH environment variable
    3. '/usr/bin/git'
    """

    def is_executable(exe_path):
        '''Is exe_path executable?'''
        return os.path.exists(exe_path) and os.access(exe_path, os.X_OK)

    git_path_pref = get_pref("GIT_PATH")
    if git_path_pref:
        if is_executable(git_path_pref):
            # take a GIT_PATH pref
            return git_path_pref
        else:
            log_err("WARNING: Git path given in the 'GIT_PATH' preference:"
                    " '%s' either doesn't exist or is not executable! "
                    "Falling back to one set in PATH, or /usr/bin/git."
                    % git_path_pref)
    for path_env in os.environ["PATH"].split(":"):
        gitbin = os.path.join(path_env, "git")
        if is_executable(gitbin):
            # take the first 'git' in PATH that we find
            return gitbin
    if is_executable("/usr/bin/git"):
        # fall back to /usr/bin/git
        return "/usr/bin/git"
    return None
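# A usage sketch for the lookup above (the 'repo' object and ProcessorError
# handling are assumptions; callers generally treat a None result as fatal):
gitbin = repo.git_cmd()
if gitbin is None:
    raise ProcessorError("Could not find a usable git binary")
subprocess.check_call([gitbin, "--version"])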
def __init__(self, curl_path=None, curl_opts=None):
    super(GitHubSession, self).__init__()
    self.env = {}
    self.env["url"] = None
    if curl_path:
        self.env["CURL_PATH"] = curl_path
    if curl_opts:
        self.env["curl_opts"] = curl_opts
    token = get_pref("GITHUB_TOKEN")
    self.http_result_code = None
    if token:
        self.token = token
    elif os.path.exists(TOKEN_LOCATION):
        try:
            with open(TOKEN_LOCATION, "r") as tokenf:
                self.token = tokenf.read()
        except IOError as err:
            log_err(
                "Couldn't read token file at {}! Error: {}".format(
                    TOKEN_LOCATION, err
                )
            )
            self.token = None
    else:
        self.token = None
def curl_binary(self):
    """Return a path to a curl binary, priority in the order below.
    Raise ProcessorError if none found.
    1. env['CURL_PATH']
    2. app pref 'CURL_PATH'
    3. a 'curl' binary that can be found in the PATH environment variable
    4. '/usr/bin/curl'
    """
    if "CURL_PATH" in self.env and is_executable(self.env["CURL_PATH"]):
        return self.env["CURL_PATH"]

    curl_path_pref = get_pref("CURL_PATH")
    if curl_path_pref:
        if is_executable(curl_path_pref):
            return curl_path_pref
        else:
            log_err(
                "WARNING: curl path given in the 'CURL_PATH' preference:'{}' "
                "either doesn't exist or is not executable! Falling back "
                "to one set in PATH, or /usr/bin/curl.".format(
                    curl_path_pref))

    for path_env in os.environ["PATH"].split(":"):
        curlbin = os.path.join(path_env, "curl")
        if is_executable(curlbin):
            return curlbin

    if is_executable("/usr/bin/curl"):
        return "/usr/bin/curl"

    raise ProcessorError("Unable to locate or execute any curl binary")
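# curl_binary depends on a module-level is_executable() helper; a minimal
# sketch mirroring the nested helper inside git_cmd above:
import os

def is_executable(exe_path):
    '''Is exe_path executable?'''
    return os.path.exists(exe_path) and os.access(exe_path, os.X_OK)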
def main(self):
    '''Rebuild Munki catalogs in repo_path'''
    cache_dir = get_pref("CACHE_DIR") or os.path.expanduser(
        "~/Library/AutoPkg/Cache")
    current_run_results_plist = os.path.join(
        cache_dir, "autopkg_results.plist")
    try:
        run_results = plistlib.readPlist(current_run_results_plist)
    except IOError:
        run_results = []

    something_imported = False
    # run_results is an array of autopackager.results,
    # which is itself an array.
    # look through all the results for evidence that
    # something was imported
    # this could probably be done as an array comprehension
    # but might be harder to grasp...
    for result in run_results:
        for item in result:
            if item.get("Processor") == "MunkiImporter":
                if item["Output"].get("pkginfo_repo_path"):
                    something_imported = True
                    break

    if not something_imported and not self.env.get("force_rebuild"):
        self.output("No need to rebuild catalogs.")
        self.env["makecatalogs_resultcode"] = 0
        self.env["makecatalogs_stderr"] = ""
    else:
        # Generate arguments for makecatalogs.
        args = ["/usr/local/munki/makecatalogs"]
        if self.env["MUNKI_REPO"].startswith('/'):
            # looks like a file path instead of a URL
            args.append(self.env["MUNKI_REPO"])
        else:
            args.extend(["--repo-url", self.env["MUNKI_REPO"]])
        if self.env.get("MUNKI_REPO_PLUGIN"):
            args.extend(["--plugin", self.env["MUNKI_REPO_PLUGIN"]])

        # Call makecatalogs.
        try:
            proc = subprocess.Popen(
                args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (_, err_out) = proc.communicate()
        except OSError as err:
            raise ProcessorError(
                "makecatalog execution failed with error code %d: %s"
                % (err.errno, err.strerror))
        self.env["makecatalogs_resultcode"] = proc.returncode
        self.env["makecatalogs_stderr"] = err_out
        if proc.returncode != 0:
            raise ProcessorError("makecatalogs failed: %s" % err_out)
        else:
            self.output("Munki catalogs rebuilt!")
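# For illustration, the two branches above produce argument lists like the
# following (the repo path, URL, and plugin name are hypothetical):
#   ['/usr/local/munki/makecatalogs', '/Users/Shared/munki_repo']
#   ['/usr/local/munki/makecatalogs', '--repo-url',
#    'https://munki.example.com/repo', '--plugin', 'MWA2APIRepo']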
def validate_preferences(prefs):
    """Return true if all preferences are set."""
    if VERBOSE:
        display_verbose(prefs)
    prefs_valid = True
    if not autopkglib.get_pref('RECIPE_REPO_DIR'):
        timeprint('RECIPE_REPO_DIR is missing or empty.')
        prefs_valid = False
    if not autopkglib.get_pref('RECIPE_OVERRIDE_DIRS'):
        timeprint('RECIPE_OVERRIDE_DIRS is missing or empty.')
        prefs_valid = False
    if not prefs['repo_dir']:
        timeprint(
            'repo_dir argument, GitRepo pref, or MUNKI_REPO is missing '
            'or empty.'
        )
        prefs_valid = False
    return prefs_valid
def main(self):
    '''Import anything newly added to the Munki repo into AirWatch'''
    cache_dir = get_pref("CACHE_DIR") or os.path.expanduser(
        "~/Library/AutoPkg/Cache")
    current_run_results_plist = os.path.join(
        cache_dir, "autopkg_results.plist")
    try:
        run_results = plistlib.readPlist(current_run_results_plist)
    except IOError:
        run_results = []

    something_imported = False
    try:
        pkginfo_path = self.env["munki_importer_summary_result"]["data"][
            "pkginfo_path"]
    except (KeyError, TypeError):
        pkginfo_path = None

    # run_results is an array of autopackager.results,
    # which is itself an array.
    # look through all the results for evidence that
    # something was imported
    # this could probably be done as an array comprehension
    # but might be harder to grasp...
    # for result in run_results:
    #     self.output(result)
    #     for item in result:
    #         if "MunkiImporter" in item.get("Processor"):
    #             self.output("We found MunkiImporter")
    #             if item["Output"]["pkginfo_repo_path"]:
    #                 something_imported = True
    #                 break
    if pkginfo_path:
        something_imported = True

    if not something_imported and not self.env.get("force_import"):
        self.output(run_results)
        self.output("No updates so nothing to import to AirWatch")
        self.env["airwatch_resultcode"] = 0
        self.env["airwatch_stderr"] = ""
    elif self.env.get("force_import") and not something_imported:
        # TODO: Upload all pkgs/pkginfos/icons to AW from munki repo
        # Look for munki code where it tries to find the icon in the repo
        pass
    else:
        pi = self.env["pkginfo_repo_path"]
        pkg = self.env["pkg_repo_path"]
        icon_path = None
        # self.output(self.awimport('pkginfo', pi))
        # self.output(self.awimport('pkg', pkg))
        self.output(
            self.awimport('pkg', pkg, 'pkginfo', pi, 'icon', icon_path))
def handle_recipe(recipe, pkg_path=None):
    """Handle the complete workflow of an autopkg recipe."""
    display_verbose("Handling %s" % recipe)
    if autopkglib.get_pref('RECIPE_REPO_DIR'):
        recipe_repo_dir = autopkglib.get_pref('RECIPE_REPO_DIR')
    else:
        recipe_repo_dir = os.path.expanduser('~/Library/AutoPkg/RecipeRepos')
    report_plist_path = os.path.join(
        os.path.dirname(recipe_repo_dir), 'autopkg.plist'
    )
    # 1. Syncing is no longer implemented
    # 2. Parse recipe name for basic item name
    branchname = parse_recipe_name(recipe)
    # 3. Create feature branch
    create_feature_branch(branchname)
    # 4. Run autopkg for that recipe
    run_recipe(recipe, report_plist_path, pkg_path)
    # 5. Parse report plist
    run_results = parse_report_plist(report_plist_path)
    if not run_results['imported'] and not run_results['failed']:
        # Nothing happened
        cleanup_branch(branchname)
        return
    if run_results['failed']:
        # Item failed, so file a task
        failed_task(run_results['failed'])
        cleanup_branch(branchname)
        return
    if run_results['imported']:
        # Item succeeded, so continue.
        # 6. Run any binary-handling middleware
        binary_middleware(run_results['imported'][0])
        # 7. If any changes occurred, create git commit
        create_commit(run_results['imported'][0])
        # 8. Rename branch with version
        rename_branch_version(
            branchname, str(run_results['imported'][0]['version'])
        )
        # 9. File a task
        imported_task(run_results['imported'][0])
        # 10. Switch back to master
        change_feature_branch('master')
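# parse_report_plist is referenced above but not shown. A plausible sketch
# based on the report plist structure autopkg writes (the 'failures',
# 'summary_results', and munki summary keys are assumptions here):
import plistlib

def parse_report_plist(report_plist_path):
    '''Returns a dict of imported and failed items from an autopkg report.'''
    run_results = {'imported': [], 'failed': []}
    try:
        with open(report_plist_path, 'rb') as f:
            report = plistlib.load(f)
    except IOError:
        return run_results
    run_results['failed'] = report.get('failures', [])
    munki_summary = report.get('summary_results', {}).get(
        'munki_importer_summary_result', {})
    run_results['imported'] = munki_summary.get('data_rows', [])
    return run_results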
def create_parent_list(recipes):
    '''Returns a list of all parent recipes used by recipe list'''
    # build search list
    searchlist = glob.glob(
        os.path.join(autopkglib.get_pref('RECIPE_REPO_DIR'), "*"))
    # print "Recipe search list: %s" % searchlist
    parent_list = list()
    for recipe in recipes:
        parent_list.extend(get_recipe_parents(recipe, searchlist))
    # parent_list now has all recipes used to run those above
    # print "Total parent list: %s" % parent_list
    return parent_list
def main(self):
    '''Rebuild Munki catalogs in repo_path'''
    cache_dir = get_pref("CACHE_DIR") or os.path.expanduser(
        "~/Library/AutoPkg/Cache")
    current_run_results_plist = os.path.join(
        cache_dir, "autopkg_results.plist")
    try:
        run_results = plistlib.readPlist(current_run_results_plist)
    except IOError:
        run_results = []

    something_imported = False
    # run_results is an array of autopackager.results,
    # which is itself an array.
    # look through all the results for evidence that
    # something was imported
    # this could probably be done as an array comprehension
    # but might be harder to grasp...
    for result in run_results:
        for item in result:
            if item.get("Processor") == "MunkiImporter":
                if item["Output"].get("pkginfo_repo_path"):
                    something_imported = True
                    break

    if not something_imported and not self.env.get("force_rebuild"):
        self.output("No need to rebuild catalogs.")
        self.env["makecatalogs_resultcode"] = 0
        self.env["makecatalogs_stderr"] = ""
    else:
        # Generate arguments for makecatalogs.
        args = ["/usr/local/munki/makecatalogs",
                self.env["munki_repo_path"]]
        # Call makecatalogs.
        try:
            proc = subprocess.Popen(
                args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (_, err_out) = proc.communicate()
        except OSError as err:
            raise ProcessorError(
                "makecatalog execution failed with error code %d: %s"
                % (err.errno, err.strerror))
        self.env["makecatalogs_resultcode"] = proc.returncode
        self.env["makecatalogs_stderr"] = err_out
        if proc.returncode != 0:
            raise ProcessorError("makecatalogs failed: %s" % err_out)
        else:
            self.output(
                "Munki catalogs rebuilt! Now compressing repo into a tarball")
            make_tarfile(
                os.path.dirname(self.env["munki_repo_path"]) +
                '/latest.tar.gz',
                self.env["munki_repo_path"])
            self.output("Tarball complete")
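# make_tarfile is referenced above but not defined in this snippet. A minimal
# sketch using the standard library (the real helper may differ):
import os
import tarfile

def make_tarfile(output_filename, source_dir):
    '''Compresses source_dir into a gzipped tarball at output_filename.'''
    with tarfile.open(output_filename, "w:gz") as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir))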
def __init__(self):
    token = get_pref("GITHUB_TOKEN")
    if token:
        self.token = token
    elif os.path.exists(TOKEN_LOCATION):
        try:
            with open(TOKEN_LOCATION, "r") as tokenf:
                self.token = tokenf.read()
        except IOError as err:
            log_err("Couldn't read token file at %s! Error: %s"
                    % (TOKEN_LOCATION, err))
            self.token = None
    else:
        self.token = None
def __init__(self):
    token = get_pref('GITHUB_TOKEN')
    if token:
        self.token = token
    elif os.path.exists(TOKEN_LOCATION):
        try:
            with open(TOKEN_LOCATION, "r") as tokenf:
                self.token = tokenf.read()
        except IOError as err:
            print >> sys.stderr, (
                "Couldn't read token file at %s! Error: %s"
                % (TOKEN_LOCATION, err))
            self.token = None
    else:
        self.token = None
def read_preferences(args):
    """Read our preferences and return a dict."""
    prefs_dict = {}
    # Equivalent to -l/--list
    prefs_dict['runlist'] = args.list or get_pref('RunList') or []
    # Equivalent to -v/--verbose
    prefs_dict['verbosity'] = (
        bool(args.verbose or get_pref('DebugMode')) or False)
    # Equivalent to -g/--gitrepo
    prefs_dict['repo_dir'] = (args.gitrepo or get_pref('GitRepo') or
                              autopkglib.get_pref('MUNKI_REPO') or None)
    # Equivalent to --arc
    prefs_dict['use_arcanist'] = (
        bool(args.arc or get_pref('UseArcanist')) or False)
    return prefs_dict
def _get_token(self, token_path: str = TOKEN_LOCATION) -> Optional[str]:
    """Reads token from preferences or provided token path.
    Defaults to TOKEN_LOCATION for the token path.
    Otherwise returns None.
    """
    token = get_pref("GITHUB_TOKEN")
    if not token and os.path.exists(token_path):
        try:
            with open(token_path, "r") as tokenf:
                token = tokenf.read().strip()
        except OSError as err:
            log_err(
                f"Couldn't read token file at {token_path}! Error: {err}")
            token = None
    # TODO: validate token given we found one but haven't checked its
    # auth status
    return token
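# A usage sketch for _get_token (the 'session' object and header shape are
# assumptions; GitHub accepts the token in an Authorization header):
token = session._get_token()
headers = {"Authorization": f"token {token}"} if token else {}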
group = parser.add_mutually_exclusive_group()
group.add_argument(
    '-l', '--list',
    help='Path to a plist, JSON, or YAML list of recipe names.'
)
group.add_argument(
    '-r', '--recipes', nargs='+', help='Recipes to run.'
)
parser.add_argument(
    '-v', '--verbose', action='store_true', help='Print verbose messages.'
)
parser.add_argument(
    '-g', '--gitrepo',
    help='Path to git repo. Defaults to MUNKI_REPO from Autopkg preferences.',
    default=autopkglib.get_pref('MUNKI_REPO')
)
parser.add_argument(
    '-a', '--arc',
    help='Use arcanist instead of git for branches.',
    action='store_true', default=False
)
parser.add_argument(
    '-d', '--dev',
    help='Dev mode - debug logging.',
    action='store_true', default=False
)
parser.add_argument(
    '-p', '--pkg',
    help=('Path to a pkg or dmg to provide to a recipe.\n'
          'Ignored if you pass in more than one recipe to -r,'
          ' or -l.'),
)
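# An example invocation given the flags above (the script name, recipe name,
# and repo path are hypothetical):
#   ./autopkg_runner.py -r GoogleChrome.munki -v -g ~/munki_repo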
    task_description = 'Error: %s' % failed_item["message"]
    create_task(task_title, task_description)


def rsync_run(source, destination):
    '''Runs rsync from source to destination'''
    rsync_args = [source, destination]
    print rsync(rsync_args)


if __name__ == '__main__':
    print "Starting: %s" % time.ctime()
    print "Beginning autopkg_runner.py execution..."
    recipe_failures = {}
    report_plist_path = os.path.join(
        os.path.dirname(autopkglib.get_pref('RECIPE_REPO_DIR')),
        'autopkg.plist')
    # Build the list of all parent recipes we'll need to run these recipes
    parent_list = create_parent_list(RECIPES)
    dest_dir = os.path.expanduser(autopkglib.get_pref('RECIPE_SEARCH_DIRS')[1])
    # print "dest_dir: %s" % dest_dir
    print "Creating destination directories..."
    # Gather a list of all the directory names for the recipes to be copied into
    dirlist = create_destinations_list(dest_dir, parent_list)
    print "Syncing recipes to recipes folder."
    # Special case: makes sure autopkg/recipes/Munki/MakeCatalogs.munki joins in
    autopkg_recipes = os.path.join(
        autopkglib.get_pref('RECIPE_REPO_DIR'),
        'com.github.autopkg.autopkg-recipes',
    )
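# rsync_run delegates to an rsync() helper that is not shown here. A plausible
# sketch (the binary path and '-a' flag are assumptions):
import subprocess

def rsync(args):
    '''Runs rsync in archive mode with the given args, returning its output.'''
    return subprocess.check_output(['/usr/bin/rsync', '-a'] + args)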