def __init__(self):
    self.args = parse_args()

    # default path for configs
    self.app_config_dir = os.path.abspath("../config")

    # Update args from config file
    if self.args.config_file:
        abs_config_file = os.path.abspath(
            os.path.dirname(self.args.config_file))
        self.args.__dict__.update(
            AppetiteArgs.load_args(self.args.config_file))
        self.app_config_dir = abs_config_file

    self.app_commands_file = os.path.join(self.app_config_dir,
                                          "commands.conf")

    if self.args.command_conf:
        self.app_commands_file = os.path.join(
            os.path.abspath(os.path.expandvars(self.args.command_conf)))

    AppetiteArgs.args_check(self.args)

    # Set up logging after args are set
    Logger.setup_logging('appetite_%s' % self.args.refname,
                         self.args.refname, True,
                         self.args.disable_logging, self.args.silent,
                         self.args.logging_path)

    # Get path for lock file
    self.scratch_location = os.path.join(
        os.path.abspath(os.path.expandvars(self.args.scratch_dir)),
        self.args.refname)

    self.run_check.set_lockfile(
        os.path.join(self.scratch_location, Helpers.APPETITE_LOCKFILE))

    # Makes sure there is only one instance of this script running
    if self.run_check.lock().is_running:
        Logger.info("Appetite is already processing")
        self.print_track_info(False, Helpers.get_track())
        return

    # Get host classes for filtering
    self.host_classes = self.args.host_classes.split(" ") \
        if isinstance(self.args.host_classes, basestring) \
        else self.args.host_classes

    # Reverse sorting needed for name filtering
    self.host_classes.sort(reverse=True)

    # Get boot ordering
    self.boot_ordering = Helpers.get_enchanced_boot_order(
        self.args.boot_order, self.host_classes)

    self.repo_name = self.args.repo_url.split('/')[-1].split('.')[0]
    self.repo_path = os.path.join(self.scratch_location, self.repo_name)
    self.apps_folder = os.path.join(self.repo_path, self.args.apps_folder)
    self.manifest_path = os.path.join(self.repo_path,
                                      Consts.CONFIG_PATH_NAME,
                                      self.args.apps_manifest)
    self.tmp_folder = os.path.join(self.scratch_location,
                                   self.args.tmp_folder)
    self.meta_folder = os.path.join(self.scratch_location, 'meta')
    self.tars_folder = os.path.join(self.tmp_folder, 'tars')
    self.hosts_folder = os.path.join(self.tmp_folder, 'hosts')

    self.remote_apps_path = os.path.normpath(self.args.app_folder)
    self.base_location, self.base_name = os.path.split(
        self.remote_apps_path)

    self.name_formatting = self.args.name_formatting.strip('"\'')

    self.meta_name = "%s%s" % (Consts.APPS_METADATE_FILENAME,
                               self.args.refname)
    self.meta_remote_folder = os.path.join(self.remote_apps_path,
                                           Consts.META_DIR)
    self.meta_remote_logs_folder = os.path.join(
        self.meta_remote_folder, Consts.HOST_LOGS_FOLDER_NAME)
    self.meta_remote_file = "%s.json" % os.path.join(
        self.meta_remote_folder, self.meta_name)

    if self.args.clean:
        Helpers.delete_path(self.scratch_location)

    self.repo_manager = RepoManager(self.repo_name, self.args.repo_url,
                                    self.args.repo_branch, "",
                                    self.scratch_location,
                                    self.args.apps_manifest,
                                    self.args.dryrun)

    Logger.debug_on(self.args.debug)

    self.ssh_app_commands = None
    self.deployment_manager = None
    self.template_values = {}
def process_hosts(self):
    """Pull the repo, generate the host list, and run the full update cycle."""
    if not self.args.tmp_folder:
        Logger.errorout("tmp folder must be defined")

    # Delete the tmp folder to keep installs clean
    Helpers.delete_path(self.tmp_folder)

    try:
        if self.args.template_files:
            template_paths = self.args.template_files

            # In case one long string is entered
            if isinstance(self.args.template_files, basestring):
                template_paths = self.args.template_files.split(' ')

            self.template_values = Helpers.load_templating(template_paths)
    except Exception as exception:
        Logger.errorout("Templating problem: %s" % exception.message)

    if self.args.template_json:
        try:
            self.template_values.update(
                json.loads(self.args.template_json.replace('\\"', '"')))
        except Exception as err:
            Logger.errorout("Error parsing --template-json",
                            error=err.message)

    if self.args.template_filtering:
        self.template_values = Helpers.filter_object(
            self.template_values, self.args.template_filtering)

    ConnManager.set_globals(self.args.ssh_user, self.args.ssh_keyfile,
                            self.args.ssh_port, self.args.app_folder,
                            self.args.app_binary, self.args.dryrun)

    # Load any files referenced relative to the appetite scripts folder
    # before this point; working directories change with repo management
    repo_status = self.repo_manager.pull_repo(self.args.clean_repo)

    if not self.args.dryrun and repo_status < 0:
        Logger.errorout('Repo Error, Look at logs for details')

    repo_check_status = self.repo_manager.check_for_update()

    Logger.add_track_info(self.repo_manager.track)

    triggered = repo_check_status['triggered'] or repo_status == 1

    Logger.info('Repo pull', output=repo_check_status['output'],
                triggered=triggered)

    if not self.args.dryrun and not self.args.skip_repo_sync and not triggered:
        Logger.info('No repo update found', complete=False)
        self.print_track_info(False)
        sys.exit(0)

    self.repo_manager.set_commit_id()

    # Load in deploymentmethods.conf
    self.deployment_manager = DeploymentMethodsManager(
        self.repo_name, "", self.scratch_location,
        self.args.deployment_methods_file)

    # Generate hosts
    if self.args.hosts:
        # In case one long string is entered
        if len(self.args.hosts) == 1:
            self.args.hosts = self.args.hosts[0].split(' ')

        for host in self.args.hosts:
            split_hostname = host.strip("'\"").split(':')
            clean_hostname = split_hostname[0].split('.')[0].strip("'\"")

            # Along with the hostname, an ssh hostname can be defined.
            # This allows IP addresses to be used in case there is no DNS.
            host_data = Helpers.pull_class_from_host(
                self.name_formatting, clean_hostname, self.host_classes)

            if host_data:
                # Can use a specified hostname/IP.
                # Default is the given hostname
                ssh_host = split_hostname[len(split_hostname) - 1]
                self.appetite_hosts.add_host(self, clean_hostname,
                                             host_data, ssh_host)
    else:
        # Create hosts based on classes
        for host_class in self.host_classes:
            self.appetite_hosts.add_host(
                self,
                Helpers.build_hostname(
                    self.name_formatting,  # pylint: disable=no-value-for-parameter
                    host_class, 1))

    if self.appetite_hosts.is_empty():
        Logger.errorout("No hosts found after filtering")

    if self.args.clean_metas:
        Helpers.delete_path(self.meta_folder)

    # Only update if a manifest file is not found
    self.update_manifests(check_if_exists=True)

    Logger.info("appetite started",
                use_templating=self.args.templating,
                firstrun=self.args.firstrun)

    self.populate_apps_to_hosts()

    self.ssh_app_commands = ConnManager.SshAppCommands(
        self.app_commands_file, self.template_values)

    changes_found = self.create_host_directories_and_tar()

    if changes_found:
        Logger.info("Start host updates")
        self.update_hosts()
        Logger.info("End host updates")

    self.print_track_info(changes_found)
    Logger.info("Appetite complete", complete=True, changes=changes_found)
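
# Hedged sketch (not part of the original source): the "hostname[:ssh_host]"
# parsing used above for --hosts entries, pulled out as a standalone helper so
# the splitting rules are easy to see. The helper name is illustrative only.
def _example_split_host_entry(entry):
    """Split "hostname[:ssh_host]" into (short hostname, ssh target)."""
    parts = entry.strip("'\"").split(':')
    # Short hostname: drop any domain suffix and stray quotes
    clean_hostname = parts[0].split('.')[0].strip("'\"")
    # ssh target: the last segment, which falls back to the hostname itself
    # when no ":ip_or_host" suffix was supplied
    ssh_host = parts[-1]
    return clean_hostname, ssh_host

# Example: _example_split_host_entry("idx-site1-001.example.com:10.0.0.5")
# returns ("idx-site1-001", "10.0.0.5")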
def create_host_directories_and_tar(self):
    """Main packaging function

    Works in 3 parts:
        1. Validate app data and configurations
        2. Create tmp directories for each host with loaded apps and manifest
        3. Package (tar) up host tmp directories for distribution
    """
    Helpers.delete_path(self.tmp_folder)
    Helpers.create_path(self.tars_folder, True)

    self.repo_manager.set_commit_id()
    master_commit_log = self.repo_manager.get_commit_log()

    errors_found = False
    changes_found = False

    for host in self.appetite_hosts:  # pylint: disable=too-many-nested-blocks
        # Per host, build the apps folder and tar it up based on class
        hostname = host.hostname
        apps = host.get_apps(self.args.refname)
        tarname = host.tarname

        apps = sorted(apps, key=lambda app: app.commit_id)

        tmp_hostname_dir = os.path.join(self.hosts_folder, hostname)
        tmp_hostname_meta = os.path.join(tmp_hostname_dir, Consts.META_DIR)

        apps_meta = []

        if len(apps) < 1:
            Logger.warn("Host with no apps", hostname=hostname)
            continue

        # Parse the remote meta file from the host
        # This file might not exist
        remote_meta_file = host.local_meta_file
        remote_metas_loaded = False
        if os.path.exists(remote_meta_file):
            try:
                with open(remote_meta_file) as remote_data_file:
                    remote_metas_master = json.load(remote_data_file)
                    remote_metas_content = remote_metas_master['content'] \
                        if 'content' in remote_metas_master else remote_metas_master
                    remote_metas = [
                        AppetiteHost.create_app_from_object(
                            self.repo_manager, self.deployment_manager,
                            meta_data)
                        for meta_data in remote_metas_content
                    ]
                    remote_metas_loaded = True
            except Exception as exception:
                Logger.error("Problems loading meta file",
                             error=exception.message,
                             path=remote_meta_file)
        elif not self.args.dryrun:
            Logger.warn("Local version of remote meta not found",
                        file=remote_meta_file)

        ordered_unique_apps = sorted(
            list(set(apps)),
            key=lambda single_app: (single_app.name, single_app.commit_id,
                                    single_app.method_name))

        for iapp in ordered_unique_apps:
            app_occurrences = apps.count(iapp)
            if app_occurrences > 1:
                Logger.warn("Dup app found",
                            host=host.hostname,
                            app_info=iapp.app_key,
                            occurences=app_occurrences)

        # Validate app data and configurations
        # Go through the apps and check to see if there are any errors.
        # This is where the remote meta is compared to the newly generated
        # list of apps from the manifest
        for app in apps:
            raw_app_path = os.path.join(self.apps_folder, app.name)

            # Check the commit id for problems
            if app.commit_id:
                self.repo_manager.set_commit_id(app.commit_id)
            else:  # pylint: disable=else-if-used
                if self.args.strict_commitids:
                    Logger.error("Application with missing commit Id",
                                 hostname=hostname, app=app.name)
                    errors_found = True
                    continue
                else:
                    app._commit_id = master_commit_log['app_commit_id']  # pylint: disable=protected-access
                    self.repo_manager.set_commit_id(app.commit_id)

            # Check if an app listed in the manifest
            # exists with the correct commit id
            if Helpers.check_path(raw_app_path):
                meta_to_append = None
                app.refresh_version_info(self.args.refname,
                                         Consts.META_APP_UNCHANGED)
                remote_meta = None

                # Check to see what has changed
                if remote_metas_loaded:
                    # Search the remote meta to see if the application already exists
                    remote_meta = next((rmeta for rmeta in remote_metas
                                        if app.check_names(rmeta)), None)

                    if remote_meta:
                        # If the app does exist on the system, check whether the commit ids changed
                        if remote_meta.commit_id != app.commit_id:
                            meta_to_append = app.set_status_changed()
                        else:
                            # Meta has not changed so use the existing meta
                            meta_to_append = app.clone
                            meta_to_append.update_app_version(app)

                        # To track if an app is removed from the remote meta
                        remote_metas.remove(remote_meta)

                if not meta_to_append:
                    # There is no remote meta so all files should be added
                    meta_to_append = app.set_status_added()

                if remote_meta and meta_to_append:
                    meta_outcome = Helpers.debug_app_versions(
                        meta_to_append, remote_meta, meta_to_append.status)
                    Logger.debug("Check meta logic", outcome=meta_outcome)

                    if meta_to_append.has_changed:
                        Logger.info("App change", logic=meta_outcome)

                apps_meta.append(meta_to_append)
            else:
                Logger.error("Missing application",
                             hostname=hostname,
                             app=app.name,
                             path=raw_app_path)
                continue

        if remote_metas_loaded and len(remote_metas) > 0:
            # Any apps left in the remote meta do not exist in the current
            # manifest and should be deleted
            delete_list = []
            for deleted_app in remote_metas:
                if deleted_app.method_info:
                    deleted_app.set_status_deleted()

                    # Added logic check to catch method changes
                    added_app_found = next(
                        (app for app in apps_meta
                         if app.status == Consts.META_APP_ADDED and
                         app.name == deleted_app.name and
                         app.method_info['path'] ==
                         deleted_app.method_info['path']), None)

                    if added_app_found:
                        added_app_found.set_status_changed()
                    else:
                        delete_list.append(deleted_app)
                else:
                    Logger.error("Problems with method info for deleted app.",
                                 hostname=hostname,
                                 app=deleted_app.name)

            apps_meta += delete_list

        # Only do something if there has been a change
        if len([app for app in apps_meta if not app.is_unchanged]) < 1:
            continue

        # No point continuing if there is no connection to the host
        if not self.check_host_connection(host):
            continue

        # Clean command lines for auth params
        # This data is ingested so creds should be removed
        # apps_meta = [updated_app.clone for updated_app in apps_meta]
        if not self.args.disable_logging:
            for updated_app in apps_meta:
                Logger.log_event(updated_app.to_dict)

        # Applications that actually need to be updated
        tar_apps = sorted(
            [updated_app for updated_app in apps_meta if updated_app.updated],
            key=lambda tar_app: tar_app.app)

        # Checking here allows templating; otherwise the templating steps are skipped
        use_templating = self.template_values and self.args.templating

        Helpers.create_path(
            os.path.join(tmp_hostname_meta, Consts.HOST_LOGS_FOLDER_NAME),
            True)

        if len(tar_apps) > 0:
            # All error checks have been done above, build out
            # the hosts directory and tar up
            for updated_app in tar_apps:
                app_path = os.path.join(tmp_hostname_dir,
                                        updated_app.method_info['path'])
                Helpers.create_path(app_path, True)

                raw_app_path = os.path.join(self.apps_folder,
                                            updated_app.name)

                self.repo_manager.set_commit_id(updated_app.commit_id)

                if updated_app.update_method_is_copy:
                    app_dest = os.path.join(app_path, updated_app.app_clean)
                else:
                    app_dest = app_path

                copy_tree(raw_app_path, app_dest)

                lookups_inclusion_location = os.path.join(
                    app_dest, self.deployment_manager.inclusion_filename)

                ignore_dir = os.path.join(app_dest, Consts.TMP_IGNORE_DIR)

                # Ignore files/folders set in the global configurations
                if self.args.install_ignore:
                    content_ignored_results = Helpers.move_regexed_files(
                        self.args.install_ignore.split(';'), app_dest,
                        ignore_dir)

                    files_included = content_ignored_results['files_moved']
                    if len(files_included) > 0:
                        Logger.error(
                            "Globally these files should not exist in the App. "
                            "The files have been removed from the install.",
                            files=files_included,
                            hostname=hostname,
                            app=updated_app.name)

                    # Users should not have the capability to include files
                    # from the global ignore.
                    Helpers.delete_path(ignore_dir)

                # Defined folders/files are moved out of the application.
                # This is defined in deploymentmethods.conf.
                # If an app is installed for the first time, all files should be included
                if 'install_ignore' in updated_app.method_info and not updated_app.is_added:
                    Helpers.move_regexed_files(
                        updated_app.method_info['install_ignore'], app_dest,
                        ignore_dir)

                    # If there is an inclusion file, include files back into the app.
                    # This is defined on a per app basis
                    if os.path.isfile(lookups_inclusion_location):
                        with open(lookups_inclusion_location, "r") as f:
                            lines = [l.strip() for l in f.readlines()]

                        lookup_inclusion_results = Helpers.move_regexed_files(
                            lines, ignore_dir, app_dest)

                        if lookup_inclusion_results['errors_found']:
                            Logger.warn(
                                "Lookup inclusion error found",
                                paths=lookup_inclusion_results['path_errors'],
                                hostname=hostname,
                                app=updated_app.name,
                                todo="Remove file/path from inclusion..")
                            # Problem with host inclusion,
                            # move to next host
                            continue

                        updated_app.method_info['inclusions'] = \
                            lookup_inclusion_results['files_moved']

                        # Update objects with inclusions
                        updated_app.copy_value_to_method_info(
                            'inclusions', apps_meta)

                        os.remove(lookups_inclusion_location)

                    Helpers.delete_path(ignore_dir)

                if use_templating and not updated_app.method_info[
                        'skip_templating']:
                    # Can template based on vars from templated
                    # values, host vars and app vars
                    Helpers.template_directory(app_dest, [
                        self.template_values, host.to_dict,
                        updated_app.to_dict
                    ])

                # Should only change access and create a version file if a whole app is copied
                if updated_app.update_method_is_copy:
                    for host_path, host_dir, host_files in os.walk(app_dest):  # pylint: disable=unused-variable
                        for host_file in host_files:
                            # Splunk apps can have active binaries in multiple languages.
                            # This is a catch-all to make sure apps have all the
                            # required permissions.
                            chmod = 0755
                            os.chmod(os.path.join(host_path, host_file),
                                     chmod)

                    if not updated_app.method_info['no_appetite_changes']:
                        with open(
                                os.path.join(
                                    app_dest,
                                    Helpers.get_app_version_filename()),
                                "w") as f:
                            f.write(updated_app.to_json)

                        AppVersioning.create_app_version(
                            app_dest,
                            updated_app.commit_log['app_abbrev_commit_id'])

        apps_distro = Helpers.content_wrapper(apps_meta, Consts.META_CURRENT,
                                              hostname, self.track)

        # Meta file used as the source of truth on the instance
        master_meta = self.create_meta_files(tmp_hostname_meta, '',
                                             apps_distro)

        # Can be used to update and test manifest changes locally
        if self.args.dryrun:
            Helpers.create_path(host.local_meta_file)
            shutil.copy(master_meta, host.local_meta_file)

        # Always want clean logs ingested
        selected_apps = Helpers.select_and_update_apps(
            apps_meta, Consts.META_CURRENT, False)
        self.create_meta_log(tmp_hostname_meta, '', selected_apps,
                             Helpers.get_utc())

        host.updates = Helpers.content_process(apps_meta,
                                               Consts.META_UPDATED,
                                               hostname, self.track, True)

        # Create the meta change file
        self.create_meta_files(tmp_hostname_meta, '_update',
                               Helpers.content_convert(host.updates))

        # Clean updates file for logging
        selected_apps = Helpers.select_and_update_apps(
            apps_meta, Consts.META_UPDATED, True)
        self.create_meta_log(tmp_hostname_meta, '_update', selected_apps,
                             Helpers.get_utc())

        Logger.info("Changes found",
                    updates=Helpers.content_wrapper(apps_meta,
                                                    Consts.META_UPDATED,
                                                    hostname, self.track,
                                                    True))

        # Package (tar) up host tmp directories for distribution
        tar = tarfile.open(
            os.path.join(self.tars_folder, "%s.tar.gz" % tarname), "w:gz")
        tar.add(tmp_hostname_dir, arcname=os.path.basename(self.base_name))
        tar.close()

        changes_found = True

    if errors_found:
        sys.exit(1)

    self.repo_manager.set_commit_id()

    return changes_found
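
# Hedged sketch (not part of the original source): the packaging step above
# reduces to this stdlib tarfile pattern, archiving a per-host staging
# directory under a single top-level name inside the tarball. The helper name
# and example paths are illustrative only.
def _example_package_host_dir(staging_dir, tar_path, arc_root):
    """Create a gzipped tarball of staging_dir, rooted at arc_root in the archive."""
    import os
    import tarfile

    tar = tarfile.open(tar_path, "w:gz")
    try:
        tar.add(staging_dir, arcname=os.path.basename(arc_root))
    finally:
        tar.close()

# Example: _example_package_host_dir("/tmp/appetite/hosts/idx-001",
#                                    "/tmp/appetite/tars/idx-001.tar.gz",
#                                    "apps")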
def __init__(self, is_already_running):
    """Parse args, set up the run, and perform the full Appetite update cycle."""
    self.args = parse_args()

    # default path for configs
    self.app_config_dir = os.path.abspath("../config")

    # Update args from config file
    if self.args.config_file:
        abs_config_file = os.path.abspath(
            os.path.dirname(self.args.config_file))
        self.args.__dict__.update(
            AppetiteArgs.load_args(self.args.config_file))
        self.app_config_dir = abs_config_file

    self.app_commands_file = os.path.join(self.app_config_dir,
                                          "commands.conf")

    if self.args.command_conf:
        self.app_commands_file = os.path.join(
            os.path.abspath(os.path.expandvars(self.args.command_conf)))

    AppetiteArgs.args_check(self.args)

    # Set up logging after args are set
    Logger.setup_logging('appetite_%s' % self.args.refname,
                         self.args.refname, True,
                         self.args.disable_logging, self.args.silent,
                         self.args.logging_path)

    if is_already_running:
        Logger.info("Appetite is already processing")
        self.print_track_info(False, Helpers.get_track())
        return

    # Get host classes for filtering
    self.host_classes = self.args.host_classes.split(" ") \
        if isinstance(self.args.host_classes, basestring) \
        else self.args.host_classes

    # Reverse sorting needed for name filtering
    self.host_classes.sort(reverse=True)

    # Get boot ordering
    self.boot_ordering = Helpers.get_enchanced_boot_order(
        self.args.boot_order, self.host_classes)

    self.repo_name = self.args.repo_url.split('/')[-1].split('.')[0]
    self.scratch_location = os.path.join(
        os.path.abspath(os.path.expandvars(self.args.scratch_dir)),
        self.args.refname)
    self.repo_path = os.path.join(self.scratch_location, self.repo_name)
    self.apps_folder = os.path.join(self.repo_path, self.args.apps_folder)
    self.manifest_path = os.path.join(self.repo_path,
                                      Consts.CONFIG_PATH_NAME,
                                      self.args.apps_manifest)
    self.tmp_folder = os.path.join(self.scratch_location,
                                   self.args.tmp_folder)
    self.meta_folder = os.path.join(self.scratch_location, 'meta')
    self.tars_folder = os.path.join(self.tmp_folder, 'tars')
    self.hosts_folder = os.path.join(self.tmp_folder, 'hosts')

    self.remote_apps_path = os.path.normpath(self.args.app_folder)
    self.base_location, self.base_name = os.path.split(
        self.remote_apps_path)

    self.name_formatting = self.args.name_formatting.strip('"\'')

    self.meta_name = "%s%s" % (Consts.APPS_METADATE_FILENAME,
                               self.args.refname)
    self.meta_remote_folder = os.path.join(self.remote_apps_path,
                                           Consts.META_DIR)
    self.meta_remote_logs_folder = os.path.join(
        self.meta_remote_folder, Consts.HOST_LOGS_FOLDER_NAME)
    self.meta_remote_file = "%s.json" % os.path.join(
        self.meta_remote_folder, self.meta_name)

    if self.args.clean:
        Helpers.delete_path(self.scratch_location)

    self.repo_manager = RepoManager(self.repo_name, self.args.repo_url,
                                    self.args.repo_branch, "",
                                    self.scratch_location,
                                    self.args.apps_manifest,
                                    self.args.dryrun)

    Logger.debug_on(self.args.debug)

    if not self.args.tmp_folder:
        Logger.errorout("tmp folder must be defined")

    # Delete the tmp folder to keep installs clean
    Helpers.delete_path(self.tmp_folder)

    self.template_values = {}
    try:
        if self.args.template_files:
            template_paths = self.args.template_files

            # In case one long string is entered
            if isinstance(self.args.template_files, basestring):
                template_paths = self.args.template_files.split(' ')

            self.template_values = Helpers.load_templating(template_paths)
    except Exception as exception:
        Logger.errorout("Templating problem: %s" % exception.message)

    if self.args.template_json:
        try:
            self.template_values.update(
                json.loads(self.args.template_json.replace('\\"', '"')))
        except Exception as err:
            Logger.errorout("Error parsing --template-json",
                            error=err.message)

    if self.args.template_filtering:
        self.template_values = Helpers.filter_object(
            self.template_values, self.args.template_filtering)

    ConnManager.set_globals(self.args.ssh_user, self.args.ssh_keyfile,
                            self.args.ssh_port, self.args.app_folder,
                            self.args.app_binary, self.args.dryrun)

    self.ssh_app_commands = ConnManager.SshAppCommands(
        self.app_commands_file, self.template_values)

    # Load any files referenced relative to the appetite scripts folder
    # before this point; working directories change with repo management
    repo_status = self.repo_manager.pull_repo(self.args.clean_repo)

    if not self.args.dryrun and repo_status < 0:
        Logger.errorout('Repo Error, Look at logs for details')

    repo_check_status = self.repo_manager.check_for_update()

    Logger.add_track_info(self.repo_manager.track)

    triggered = repo_check_status['triggered'] or repo_status == 1

    Logger.info('Repo pull', output=repo_check_status['output'],
                triggered=triggered)

    if not self.args.dryrun and not self.args.skip_repo_sync and not triggered:
        Logger.info('No repo update found', complete=False)
        self.print_track_info(False)
        sys.exit(0)

    self.repo_manager.set_commit_id()

    # Load in deploymentmethods.conf
    self.deployment_manager = DeploymentMethodsManager(
        self.repo_name, "", self.scratch_location)

    # Generate hosts
    if self.args.hosts:
        # In case one long string is entered
        if len(self.args.hosts) == 1:
            self.args.hosts = self.args.hosts[0].split(' ')

        for host in self.args.hosts:
            split_hostname = host.strip("'\"").split(':')
            clean_hostname = split_hostname[0].split('.')[0].strip("'\"")

            # Along with the hostname, an ssh hostname can be defined.
            # This allows IP addresses to be used in case there is no DNS.
            host_data = Helpers.pull_class_from_host(
                self.name_formatting, clean_hostname, self.host_classes)

            if host_data:
                # Can use a specified hostname/IP.
                # Default is the given hostname
                ssh_host = split_hostname[len(split_hostname) - 1]
                self.appetite_hosts.add_host(self, clean_hostname,
                                             host_data, ssh_host)
    else:
        # Create hosts based on classes
        for host_class in self.host_classes:
            self.appetite_hosts.add_host(
                self,
                Helpers.build_hostname(
                    self.name_formatting,  # pylint: disable=no-value-for-parameter
                    host_class, 1))

    if self.appetite_hosts.is_empty():
        Logger.errorout("No hosts found after filtering")

    if self.args.clean_metas:
        Helpers.delete_path(self.meta_folder)

    # Only update if a manifest file is not found
    self.update_manifests(check_if_exists=True)

    Logger.info("appetite started",
                use_templating=self.args.templating,
                firstrun=self.args.firstrun)

    self.populate_apps_to_hosts()

    changes_found = self.create_host_directories_and_tar()

    if changes_found:
        Logger.info("Start host updates")
        self.update_hosts()
        Logger.info("End host updates")

    self.print_track_info(changes_found)
    Logger.info("Appetite complete", complete=True, changes=changes_found)
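
# Hedged sketch (not part of the original source): a minimal way the split
# __init__()/process_hosts() variant shown at the top of this section might be
# driven. The class name `AppetiteRun` is an assumption; the actual module is
# not shown in this excerpt and may wire this up differently (e.g. via a
# main() helper or the monolithic __init__(is_already_running) variant above).
if __name__ == "__main__":
    run = AppetiteRun()   # parse args, set up logging, locking and repo paths
    run.process_hosts()   # pull the repo, build per-host tars, push updates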