def update(self, timeout_mins=240):
    """Trigger a Heat stack update with pre-update hooks armed.

    Loads the overcloud template plus any extra environment files,
    flags every resource with a 'pre-update' hook so the update can be
    stepped through, and issues the update via heatclient.

    :param timeout_mins: Heat stack update timeout in minutes.
    """
    # Timestamp rounded to whole seconds forces config re-application.
    now = int(time.time())
    update_params = {"UpdateIdentifier": now}

    template_files, template = template_utils.get_template_contents(
        template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))

    env_paths = list(self.environment_files) if self.environment_files else []
    env_files, env = template_utils.process_multiple_environments_and_files(
        env_paths=env_paths)

    # Arm a pre-update breakpoint hook on every resource.
    hooks_env = {
        "resource_registry": {
            "resources": {
                "*": {"*": {UPDATE_RESOURCE_NAME: {"hooks": "pre-update"}}}
            }
        }
    }
    template_utils.deep_update(env, hooks_env)

    # Environment files win over template files on key collision.
    all_files = {}
    all_files.update(template_files)
    all_files.update(env_files)

    fields = {
        "existing": True,
        "stack_id": self.stack.id,
        "template": template,
        "files": all_files,
        "environment": env,
        "parameters": update_params,
        "timeout_mins": timeout_mins,
    }
    LOG.info("updating stack: %s", self.stack.stack_name)
    LOG.debug("stack update params: %s", fields)
    self.heatclient.stacks.update(**fields)
def add_breakpoints_cleanup_into_env(env):
    """Clear pre-update breakpoint hooks from every resource in *env*."""
    cleanup_registry = {
        'resource_registry': {
            'resources': {
                '*': {
                    '*': {
                        constants.UPDATE_RESOURCE_NAME: {'hooks': []}
                    }
                }
            }
        }
    }
    template_utils.deep_update(env, cleanup_registry)
def _heat_deploy(self, stack, stack_name, template_path, parameters,
                 created_env_files, timeout, tht_root, env):
    """Verify the Baremetal nodes are available and do a stack update"""
    clients = self.app.client_manager
    workflow_client = clients.workflow_engine

    self.log.debug("Processing environment files %s" % created_env_files)
    env_files, localenv = (
        template_utils.process_multiple_environments_and_files(
            created_env_files))
    # Command line has more precedence than env files.
    template_utils.deep_update(localenv, env)
    if stack:
        update.add_breakpoints_cleanup_into_env(localenv)

    self.log.debug("Getting template contents from plan %s" % stack_name)
    # Reference the plan here, not the local tht root, so that
    # template_object refers to the rendered overcloud.yaml rather than
    # the tht_root overcloud.j2.yaml.
    # FIXME(shardy) we need to move more of this into mistral actions
    plan_yaml_path = os.path.relpath(template_path, tht_root)

    # heatclient template_utils needs a callable that can retrieve
    # objects from a container by name/path.
    objectclient = clients.tripleoclient.object_store

    def do_object_request(method='GET', object_path=None):
        obj = objectclient.get_object(stack_name, object_path)
        return obj and obj[1]

    template_files, template = template_utils.get_template_contents(
        template_object=plan_yaml_path,
        object_request=do_object_request)

    files = dict(list(template_files.items()) + list(env_files.items()))

    number_controllers = int(parameters.get('ControllerCount', 0))
    if number_controllers > 1:
        if not localenv.get('parameter_defaults').get('NtpServer'):
            raise exceptions.InvalidConfiguration(
                'Specify --ntp-server as parameter or NtpServer in '
                'environments when using multiple controllers '
                '(with HA).')

    clients = self.app.client_manager
    moved_files = self._upload_missing_files(
        stack_name, objectclient, files, tht_root)
    self._process_and_upload_environment(
        stack_name, objectclient, localenv, moved_files, tht_root,
        workflow_client)
    deployment.deploy_and_wait(self.log, clients, stack, stack_name,
                               self.app_args.verbose_level, timeout)
def update(self):
    """Update the stack, optionally sourcing templates from Tuskar.

    When a tuskar client is available, plan templates are fetched and
    saved locally first; otherwise the local THT directory is used.
    Every resource gets a 'pre-update' hook so the update can be
    stepped through interactively.
    """
    # Time rounded to seconds; explicitly stringified because of tuskar.
    timestamp = str(int(time.time()))

    if self.tuskarclient:
        stack_params = self._set_update_params(timestamp)
        self.tht_dir = libutils.save_templates(
            self.tuskarclient.plans.templates(self.plan.uuid))
        tpl_name = 'plan.yaml'
        env_name = 'environment.yaml'
    else:
        tpl_name = TEMPLATE_NAME
        env_name = REGISTRY_NAME
        stack_params = {'UpdateIdentifier': timestamp}

    try:
        tpl_files, template = template_utils.get_template_contents(
            template_file=os.path.join(self.tht_dir, tpl_name))
        env_paths = [os.path.join(self.tht_dir, env_name)]
        if self.environment_files:
            env_paths.extend(self.environment_files)
        env_files, env = (
            template_utils.process_multiple_environments_and_files(
                env_paths=env_paths))
        # Arm a pre-update breakpoint hook on every resource.
        breakpoint_env = {
            'resource_registry': {
                'resources': {
                    '*': {
                        '*': {
                            self.hook_resource: {'hooks': 'pre-update'}
                        }
                    }
                }
            }
        }
        template_utils.deep_update(env, breakpoint_env)
        fields = {
            'existing': True,
            'stack_id': self.stack.id,
            'template': template,
            'files': dict(list(tpl_files.items()) +
                          list(env_files.items())),
            'environment': env,
            'parameters': stack_params
        }
        LOG.info('updating stack: %s', self.stack.stack_name)
        LOG.debug('stack update params: %s', fields)
        self.heatclient.stacks.update(**fields)
    finally:
        # Keep the fetched tuskar templates only when debugging.
        if self.tuskarclient:
            if LOG.isEnabledFor(logging.DEBUG):
                LOG.debug("Tuskar templates saved in %s", self.tht_dir)
            else:
                shutil.rmtree(self.tht_dir)
def _deploy_tripleo_heat_templates(self, stack, parsed_args):
    """Deploy the fixed templates in TripleO Heat Templates"""
    clients = self.app.client_manager
    network_client = clients.network
    workflow_client = clients.workflow_engine

    parameters = self._update_parameters(
        parsed_args, network_client, stack)
    tht_root = os.path.abspath(parsed_args.templates)

    plans = plan_management.list_deployment_plans(workflow_client)
    # TODO(d0ugal): We need to put a more robust strategy in place here
    # to handle updating plans.
    if parsed_args.stack in plans:
        # Upload the new plan templates to swift to replace the
        # existing templates.
        plan_management.update_plan_from_templates(
            clients, parsed_args.stack, tht_root)
    else:
        plan_management.create_plan_from_templates(
            clients, parsed_args.stack, tht_root)

    print("Deploying templates in the directory {0}".format(
        os.path.abspath(tht_root)))

    self.log.debug("Creating Environment file")
    # TODO(jprovazn): env file generated by create_environment_file()
    # is not very usable any more; scale params are included in
    # parameters and the keystone cert is generated on create only.
    env_path = utils.create_environment_file()
    env = {}
    created_env_files = []

    if stack is None:
        self.log.debug("Creating Keystone certificates")
        keystone_pki.generate_certs_into_json(env_path, False)
        created_env_files.append(env_path)

    if parsed_args.environment_directories:
        created_env_files.extend(self._load_environment_directories(
            parsed_args.environment_directories))

    env.update(self._create_parameters_env(parameters))

    if parsed_args.rhel_reg:
        reg_env_files, reg_env = self._create_registration_env(parsed_args)
        created_env_files.extend(reg_env_files)
        template_utils.deep_update(env, reg_env)
    if parsed_args.environment_files:
        created_env_files.extend(parsed_args.environment_files)

    self._try_overcloud_deploy_with_compat_yaml(
        tht_root, stack, parsed_args.stack, parameters,
        created_env_files, parsed_args.timeout, env)
def run(self, context):
    """Noop the deployment steps and update the Heat stack."""
    # Fetch the stack; error out if it does not exist.
    heat = self.get_orchestration_client(context)
    try:
        stack = heat.stacks.get(self.container)
    except heat_exc.HTTPNotFound:
        msg = "Error retrieving stack: %s" % self.container
        LOG.exception(msg)
        return actions.Result(error=msg)

    swift = self.get_object_client(context)
    try:
        env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" %
                   (self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    update_env = {}
    noop_env = {
        'resource_registry': {
            'OS::TripleO::DeploymentSteps': 'OS::Heat::None',
        },
    }
    # Also noop the per-role pre/post config tasks reported by the
    # stack's RoleData output.
    for output in stack.to_dict().get('outputs', {}):
        if output['output_key'] == 'RoleData':
            for role in output['output_value']:
                role_env = {
                    "OS::TripleO::Tasks::%sPreConfig" % role:
                        'OS::Heat::None',
                    "OS::TripleO::Tasks::%sPostConfig" % role:
                        'OS::Heat::None',
                }
                noop_env['resource_registry'].update(role_env)
    update_env.update(noop_env)
    template_utils.deep_update(env, update_env)

    # Process all plan files and create or update a stack.
    processed_data = super(UpdateStackAction, self).run(context)
    # A 'Result' instance means the parent action hit an error.
    if isinstance(processed_data, actions.Result):
        return processed_data

    stack_args = processed_data.copy()
    LOG.info("Performing Heat stack update")
    LOG.info('updating stack: %s', stack.stack_name)
    return heat.stacks.update(stack.id, **stack_args)
def add_breakpoints_cleanup_into_env(env):
    """Unset any pre-update breakpoint hooks on all resources."""
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {
                '*': {
                    '*': {constants.UPDATE_RESOURCE_NAME: {'hooks': []}}
                }
            }
        }
    })
def update(self, timeout_mins=constants.STACK_TIMEOUT_DEFAULT):
    """Perform a Heat stack update with pre-update breakpoints armed.

    :param timeout_mins: stack update timeout in minutes.
    """
    env = self.stack_fields.get('environment', {})
    # Arm a 'pre-update' hook on every resource so the update can be
    # stepped through breakpoint by breakpoint.
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {
                '*': {
                    '*': {
                        constants.UPDATE_RESOURCE_NAME: {
                            'hooks': 'pre-update'
                        }
                    }
                }
            }
        }
    })
    # Time rounded to seconds; identifiers force re-deployment steps.
    now = int(time.time())
    stack_params = {
        'DeployIdentifier': now,
        'UpdateIdentifier': now,
        'StackAction': 'UPDATE'
    }
    template_utils.deep_update(env, {'parameter_defaults': stack_params})
    self.stack_fields['environment'] = env
    fields = {
        'existing': True,
        'stack_id': self.stack.id,
        'template': self.stack_fields['template'],
        'files': self.stack_fields['files'],
        'environment': self.stack_fields['environment'],
        'timeout_mins': timeout_mins,
        'stack_name': self.stack_fields['stack_name'],
    }
    LOG.info('updating stack: %s', self.stack.stack_name)
    LOG.debug('stack update params: %s', fields)
    self.heatclient.stacks.update(**fields)
def update(self, timeout_mins=constants.STACK_TIMEOUT_DEFAULT):
    """Update the overcloud stack, pausing every resource pre-update.

    :param timeout_mins: stack update timeout in minutes.
    """
    if 'environment' in self.stack_fields:
        env = self.stack_fields['environment']
    else:
        env = {}
    # Install a pre-update breakpoint hook on all resources.
    hook = {constants.UPDATE_RESOURCE_NAME: {'hooks': 'pre-update'}}
    template_utils.deep_update(env, {
        'resource_registry': {'resources': {'*': {'*': hook}}}
    })
    # time rounded to seconds
    timestamp = int(time.time())
    stack_params = {
        'DeployIdentifier': timestamp,
        'UpdateIdentifier': timestamp,
        'StackAction': 'UPDATE'
    }
    template_utils.deep_update(env, {'parameter_defaults': stack_params})
    self.stack_fields['environment'] = env
    update_args = {
        'existing': True,
        'stack_id': self.stack.id,
        'template': self.stack_fields['template'],
        'files': self.stack_fields['files'],
        'environment': self.stack_fields['environment'],
        'timeout_mins': timeout_mins,
        'stack_name': self.stack_fields['stack_name'],
    }
    LOG.info('updating stack: %s', self.stack.stack_name)
    LOG.debug('stack update params: %s', update_args)
    self.heatclient.stacks.update(**update_args)
def update(self, timeout_mins=240):
    """Start a Heat stack update with pre-update hooks on every resource.

    :param timeout_mins: stack update timeout in minutes.
    """
    # time rounded to seconds
    now = int(time.time())
    params = {'UpdateIdentifier': now}
    tpl_files, template = template_utils.get_template_contents(
        template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))
    env_paths = list(self.environment_files or [])
    env_files, env = (
        template_utils.process_multiple_environments_and_files(
            env_paths=env_paths))
    # Pause on every resource before it is updated.
    hook_env = {
        'resource_registry': {
            'resources': {
                '*': {'*': {UPDATE_RESOURCE_NAME: {'hooks': 'pre-update'}}}
            }
        }
    }
    template_utils.deep_update(env, hook_env)
    # Environment files win over template files on key collision.
    merged_files = dict(tpl_files)
    merged_files.update(env_files)
    fields = {
        'existing': True,
        'stack_id': self.stack.id,
        'template': template,
        'files': merged_files,
        'environment': env,
        'parameters': params,
        'timeout_mins': timeout_mins,
    }
    LOG.info('updating stack: %s', self.stack.stack_name)
    LOG.debug('stack update params: %s', fields)
    self.heatclient.stacks.update(**fields)
def update(self, timeout_mins=constants.STACK_TIMEOUT_DEFAULT):
    """Issue a stack update flagged as a TripleO 'UPDATE' action.

    :param timeout_mins: stack update timeout in minutes.
    """
    # time rounded to seconds
    timestamp = int(time.time())
    stack_params = {'UpdateIdentifier': timestamp,
                    'StackAction': 'UPDATE'}
    tpl_files, template = template_utils.get_template_contents(
        template_file=os.path.join(self.tht_dir, constants.TEMPLATE_NAME))
    env_paths = []
    if self.environment_files:
        env_paths.extend(self.environment_files)
    env_files, env = (
        template_utils.process_multiple_environments_and_files(
            env_paths=env_paths))
    # Pause on every resource before it is updated.
    pre_update_hook = {
        constants.UPDATE_RESOURCE_NAME: {'hooks': 'pre-update'}
    }
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {'*': {'*': pre_update_hook}}
        }
    })
    fields = {
        'existing': True,
        'stack_id': self.stack.id,
        'template': template,
        'files': dict(list(tpl_files.items()) + list(env_files.items())),
        'environment': env,
        'parameters': stack_params,
        'timeout_mins': timeout_mins,
    }
    LOG.info('updating stack: %s', self.stack.stack_name)
    LOG.debug('stack update params: %s', fields)
    self.heatclient.stacks.update(**fields)
def build_env_paths(swift, container, plan_env):
    """Convert a plan environment into a list of environment paths.

    Swift-backed environments are referenced by URL; inline data and
    merged parameters are written to temporary files whose paths are
    also returned separately so callers can clean them up.

    :returns: tuple of (env_paths, temp_env_paths)
    """
    env_paths = []
    temp_env_paths = []
    for env in plan_env.get('environments', []):
        if env.get('path'):
            env_paths.append(os.path.join(swift.url, container,
                                          env['path']))
        elif env.get('data'):
            temp_env_paths.append(write_json_temp_file(env['data']))

    # Merge all user-set params in precedence order: generated
    # passwords first, then derived parameters (so user-specified
    # values can override derived ones), then user parameter_defaults
    # last, in case a user has set a new value for a password.
    merged_params = {}
    merged_params.update(plan_env.get('passwords', {}))
    merged_params.update(plan_env.get('derived_parameters', {}))
    merged_params = template_utils.deep_update(
        merged_params, plan_env.get('parameter_defaults', {}))
    if merged_params:
        temp_env_paths.append(write_json_temp_file(
            {'parameter_defaults': merged_params}))

    registry = plan_env.get('resource_registry', {})
    if registry:
        temp_env_paths.append(write_json_temp_file(
            {'resource_registry': registry}))

    # Temp files are processed after the swift-backed environments.
    return env_paths + temp_env_paths, temp_env_paths
def _deploy_tripleo_heat_templates(self, stack, parsed_args,
                                   tht_root, user_tht_root):
    """Deploy the fixed templates in TripleO Heat Templates"""
    plans = plan_management.list_deployment_plans(self.clients)
    generate_passwords = not parsed_args.disable_password_generation

    # TODO(d0ugal): We need to put a more robust strategy in place here
    # to handle updating plans.
    if parsed_args.stack in plans:
        # Upload the new plan templates to swift to replace the
        # existing templates.
        plan_management.update_plan_from_templates(
            self.clients, parsed_args.stack, tht_root,
            parsed_args.roles_file, generate_passwords,
            parsed_args.plan_environment_file,
            parsed_args.networks_file,
            type(self)._keep_env_on_update)
    else:
        plan_management.create_plan_from_templates(
            self.clients, parsed_args.stack, tht_root,
            parsed_args.roles_file, generate_passwords,
            parsed_args.plan_environment_file,
            parsed_args.networks_file)

    # Get any missing (e.g j2 rendered) files from the plan to tht_root
    self._download_missing_files_from_plan(tht_root, parsed_args.stack)

    print("Processing templates in the directory {0}".format(
        os.path.abspath(tht_root)))

    self.log.debug("Creating Environment files")
    env = {}
    created_env_files = []

    if parsed_args.environment_directories:
        created_env_files.extend(
            utils.load_environment_directories(
                parsed_args.environment_directories))

    parameters = {}
    if stack:
        try:
            # If a user environment already exists then keep it.
            user_env = yaml.safe_load(
                self.object_client.get_object(
                    parsed_args.stack, constants.USER_ENVIRONMENT)[1])
            template_utils.deep_update(env, user_env)
        except ClientException:
            pass
        parameters.update(self._update_parameters(parsed_args, stack))

    template_utils.deep_update(
        env,
        self._create_parameters_env(parameters, tht_root,
                                    parsed_args.stack))

    if parsed_args.rhel_reg:
        reg_env_files, reg_env = self._create_registration_env(
            parsed_args, tht_root)
        created_env_files.extend(reg_env_files)
        template_utils.deep_update(env, reg_env)
    if parsed_args.environment_files:
        created_env_files.extend(parsed_args.environment_files)

    self.log.debug("Processing environment files %s" % created_env_files)
    env_files, localenv = utils.process_multiple_environments(
        created_env_files, tht_root, user_tht_root,
        cleanup=not parsed_args.no_cleanup)
    template_utils.deep_update(env, localenv)

    if stack:
        bp_cleanup = self._create_breakpoint_cleanup_env(
            tht_root, parsed_args.stack)
        template_utils.deep_update(env, bp_cleanup)

    # FIXME(shardy) It'd be better to validate this via mistral,
    # e.g. as part of the plan create/update workflow.
    number_controllers = int(parameters.get('ControllerCount', 0))
    if number_controllers > 1:
        if not env.get('parameter_defaults').get('NtpServer'):
            raise exceptions.InvalidConfiguration(
                'Specify --ntp-server as parameter or NtpServer in '
                'environments when using multiple controllers '
                '(with HA).')

    self._try_overcloud_deploy_with_compat_yaml(
        tht_root, stack, parsed_args.stack, parameters, env_files,
        parsed_args.timeout, env, parsed_args.update_plan_only,
        parsed_args.run_validations, parsed_args.skip_deploy_identifier,
        parsed_args.plan_environment_file)
def run(self, context):
    """Noop deployment steps, bump DeployIdentifier, update the stack."""
    # Fetch the stack; error out if it does not exist.
    heat = self.get_orchestration_client(context)
    try:
        stack = heat.stacks.get(self.container)
    except heat_exc.HTTPNotFound:
        msg = "Error retrieving stack: %s" % self.container
        LOG.exception(msg)
        return actions.Result(error=msg)

    swift = self.get_object_client(context)
    try:
        env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    update_env = {
        'parameter_defaults': {
            'DeployIdentifier': int(time.time()),
        },
    }
    noop_env = {
        'resource_registry': {
            'OS::TripleO::DeploymentSteps': 'OS::Heat::None',
        },
    }
    # Noop the per-role pre/post config tasks as well.
    for output in stack.to_dict().get('outputs', {}):
        if output['output_key'] == 'RoleData':
            for role in output['output_value']:
                noop_env['resource_registry'].update({
                    "OS::TripleO::Tasks::%sPreConfig" % role:
                        'OS::Heat::None',
                    "OS::TripleO::Tasks::%sPostConfig" % role:
                        'OS::Heat::None',
                })
    update_env.update(noop_env)
    template_utils.deep_update(env, update_env)

    try:
        plan_utils.put_env(swift, env)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error updating environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    # Process all plan files and create or update a stack.
    processed_data = super(UpdateStackAction, self).run(context)
    # A 'Result' instance means the parent action hit an error.
    if isinstance(processed_data, actions.Result):
        return processed_data

    stack_args = processed_data.copy()
    LOG.info("Performing Heat stack update")
    LOG.info('updating stack: %s', stack.stack_name)
    return heat.stacks.update(stack.id, **stack_args)
def add_breakpoints_cleanup_into_env(env):
    """Reset resource hooks so no pre-update breakpoints remain."""
    no_hooks = {"hooks": []}
    template_utils.deep_update(env, {
        "resource_registry": {
            "resources": {"*": {"*": {UPDATE_RESOURCE_NAME: no_hooks}}}
        }
    })
def _deploy_tripleo_heat_templates(self, stack, parsed_args,
                                   tht_root, user_tht_root):
    """Deploy the fixed templates in TripleO Heat Templates"""
    clients = self.app.client_manager
    network_client = clients.network
    workflow_client = clients.workflow_engine

    parameters = self._update_parameters(
        parsed_args, network_client, stack)

    plans = plan_management.list_deployment_plans(workflow_client)
    # TODO(d0ugal): We need to put a more robust strategy in place here
    # to handle updating plans.
    if parsed_args.stack in plans:
        # Upload the new plan templates to swift to replace the
        # existing templates.
        plan_management.update_plan_from_templates(
            clients, parsed_args.stack, tht_root,
            parsed_args.roles_file)
    else:
        plan_management.create_plan_from_templates(
            clients, parsed_args.stack, tht_root,
            parsed_args.roles_file)

    # Get any missing (e.g j2 rendered) files from the plan to tht_root
    added_files = self._download_missing_files_from_plan(
        tht_root, parsed_args.stack)

    print("Deploying templates in the directory {0}".format(
        os.path.abspath(tht_root)))

    self.log.debug("Creating Environment file")
    # TODO(jprovazn): env file generated by create_environment_file()
    # is not very usable any more; scale params are included in
    # parameters and the keystone cert is generated on create only.
    env_path = utils.create_environment_file()
    env = {}
    created_env_files = []

    if stack is None:
        self.log.debug("Creating Keystone certificates")
        keystone_pki.generate_certs_into_json(env_path, False)
        created_env_files.append(env_path)

    if parsed_args.environment_directories:
        created_env_files.extend(self._load_environment_directories(
            parsed_args.environment_directories))

    env.update(self._create_parameters_env(parameters))

    if parsed_args.rhel_reg:
        reg_env_files, reg_env = self._create_registration_env(
            parsed_args)
        created_env_files.extend(reg_env_files)
        template_utils.deep_update(env, reg_env)
    if parsed_args.environment_files:
        created_env_files.extend(parsed_args.environment_files)

    self.log.debug("Processing environment files %s" % created_env_files)
    env_files, localenv = self._process_multiple_environments(
        created_env_files, added_files, tht_root, user_tht_root,
        cleanup=not parsed_args.no_cleanup)
    template_utils.deep_update(env, localenv)

    self._try_overcloud_deploy_with_compat_yaml(
        tht_root, stack, parsed_args.stack, parameters, env_files,
        parsed_args.timeout, env, parsed_args.update_plan_only)
def _process_multiple_environments(self, created_env_files, added_files,
                                  tht_root, user_tht_root, cleanup=True):
    """Process environment files, redirecting user paths into tht_root.

    Environments (and resource_registry entries) referencing the user's
    template root are rewritten to point at the copies under tht_root,
    where j2-rendered files actually live.

    :returns: tuple of (env_files, merged_environment)
    """
    env_files = {}
    localenv = {}
    for env_path in created_env_files:
        self.log.debug("Processing environment files %s" % env_path)
        abs_env_path = os.path.abspath(env_path)
        if abs_env_path.startswith(user_tht_root):
            new_env_path = abs_env_path.replace(user_tht_root, tht_root)
            self.log.debug("Redirecting env file %s to %s"
                           % (abs_env_path, new_env_path))
            env_path = new_env_path
        try:
            files, env = template_utils.process_environment_and_files(
                env_path=env_path)
        except hc_exc.CommandError as ex:
            # Fallback logic so that files referenced inside
            # resource_registry values rendered via j2.yaml templates
            # still resolve: the above fails because the file doesn't
            # exist in user_tht_root, but it is in tht_root.  See
            # https://bugs.launchpad.net/tripleo/+bug/1625783 for
            # details on why this is needed (backwards-compatibility).
            self.log.debug("Error %s processing environment file %s"
                           % (six.text_type(ex), env_path))
            with open(abs_env_path, 'r') as f:
                env_map = yaml.safe_load(f)
            env_registry = env_map.get('resource_registry', {})
            env_dirname = os.path.dirname(os.path.abspath(env_path))
            for rsrc, rsrc_path in six.iteritems(env_registry):
                # The absolute path must be computed relative to
                # env_path, not cwd (which is what abspath uses).
                abs_rsrc_path = os.path.normpath(
                    os.path.join(env_dirname, rsrc_path))
                # If the absolute path matches user_tht_root, rewrite
                # a temporary environment pointing at tht_root instead.
                if abs_rsrc_path.startswith(user_tht_root):
                    new_rsrc_path = abs_rsrc_path.replace(user_tht_root,
                                                          tht_root)
                    self.log.debug("Rewriting %s %s path to %s"
                                   % (env_path, rsrc, new_rsrc_path))
                    env_registry[rsrc] = new_rsrc_path
                else:
                    env_registry[rsrc] = abs_rsrc_path
            env_map['resource_registry'] = env_registry
            f_name = os.path.basename(os.path.splitext(abs_env_path)[0])
            with tempfile.NamedTemporaryFile(dir=tht_root,
                                             prefix="env-%s-" % f_name,
                                             suffix=".yaml",
                                             mode="w",
                                             delete=cleanup) as f:
                self.log.debug("Rewriting %s environment to %s"
                               % (env_path, f.name))
                f.write(yaml.safe_dump(env_map,
                                       default_flow_style=False))
                f.flush()
                files, env = (
                    template_utils.process_environment_and_files(
                        env_path=f.name))
        if files:
            self.log.debug("Adding files %s for %s" % (files, env_path))
            env_files.update(files)
        # 'env' can be a deeply nested dictionary, so a simple update
        # is not enough.
        localenv = template_utils.deep_update(localenv, env)
    return env_files, localenv
def run(self, context):
    """Assemble Heat stack arguments from the stored plan.

    Reads the plan environment from swift, renders any custom roles,
    merges passwords, derived parameters and user parameter_defaults
    (user values win), processes every environment file, and returns a
    dict of stack fields — or an error ``actions.Result``.

    :param context: the mistral action context.
    :returns: dict with stack_name/template/environment/files, or
        ``actions.Result`` carrying an error message.
    """
    error_text = None
    self.context = context
    swift = self.get_object_client(context)

    try:
        plan_env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        # Bug fix: previously returned 'error_text', which was still
        # None at this point, so the computed message was lost.
        return actions.Result(error=err_msg)

    try:
        # If the jinja overcloud template exists, process it and write
        # it back to the swift container before continuing. The method
        # called below handles the files not being found in swift, but
        # if they are found and an exception occurs during processing,
        # that exception becomes an error result.
        self._process_custom_roles(context)
    except Exception as err:
        LOG.exception("Error occurred while processing custom roles.")
        return actions.Result(error=six.text_type(err))

    template_name = plan_env.get('template')
    environments = plan_env.get('environments')
    env_paths = []
    temp_files = []

    template_object = os.path.join(swift.url, self.container,
                                   template_name)

    LOG.debug('Template: %s' % template_name)
    LOG.debug('Environments: %s' % environments)
    try:
        for env in environments:
            if env.get('path'):
                env_paths.append(os.path.join(swift.url, self.container,
                                              env['path']))
            elif env.get('data'):
                env_temp_file = _create_temp_file(env['data'])
                temp_files.append(env_temp_file)
                env_paths.append(env_temp_file)

        # Merge all user-set params in precedence order.
        merged_params = {}
        # Generated passwords first...
        passwords = plan_env.get('passwords', {})
        merged_params.update(passwords)
        # ...then derived parameters, before parameter_defaults so
        # user-specified values can override the derived values...
        derived_params = plan_env.get('derived_parameters', {})
        merged_params.update(derived_params)
        # ...then user parameter values last, in case a user has set a
        # new value for a password parameter.
        params = plan_env.get('parameter_defaults', {})
        merged_params = template_utils.deep_update(merged_params, params)
        if merged_params:
            env_temp_file = _create_temp_file(
                {'parameter_defaults': merged_params})
            temp_files.append(env_temp_file)
            env_paths.append(env_temp_file)

        def _env_path_is_object(env_path):
            retval = env_path.startswith(swift.url)
            LOG.debug('_env_path_is_object %s: %s' % (env_path, retval))
            return retval

        def _object_request(method, url, token=context.auth_token):
            return requests.request(
                method, url, headers={'X-Auth-Token': token}).content

        template_files, template = template_utils.get_template_contents(
            template_object=template_object,
            object_request=_object_request)
        env_files, env = (
            template_utils.process_multiple_environments_and_files(
                env_paths=env_paths,
                env_path_is_object=_env_path_is_object,
                object_request=_object_request))
    except Exception as err:
        error_text = six.text_type(err)
        LOG.exception("Error occurred while processing plan files.")
    finally:
        # cleanup any local temp files
        for f in temp_files:
            os.remove(f)

    if error_text:
        return actions.Result(error=error_text)

    files = dict(list(template_files.items()) + list(env_files.items()))
    return {
        'stack_name': self.container,
        'template': template,
        'environment': env,
        'files': files
    }
def run(self, context):
    """Bump update identifiers and update the stack with breakpoints."""
    # Fetch the stack; error out if it does not exist.
    heat = self.get_orchestration_client(context)
    try:
        stack = heat.stacks.get(self.container)
    except heat_exc.HTTPNotFound:
        msg = "Error retrieving stack: %s" % self.container
        LOG.exception(msg)
        return actions.Result(error=msg)

    timestamp = int(time.time())
    parameters = {
        'DeployIdentifier': timestamp,
        'UpdateIdentifier': timestamp,
        'StackAction': 'UPDATE',
    }

    swift = self.get_object_client(context)
    try:
        env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" %
                   (self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    try:
        plan_utils.update_in_env(swift, env, 'parameter_defaults',
                                 parameters)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error updating environment for plan %s: %s" %
                   (self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    # Process all plan files and create or update a stack.
    processed_data = super(UpdateStackAction, self).run(context)
    # A 'Result' instance means the parent action hit an error.
    if isinstance(processed_data, actions.Result):
        return processed_data

    stack_args = processed_data.copy()
    env = stack_args.get('environment', {})
    # Arm pre-update breakpoints on every resource.
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {
                '*': {
                    '*': {
                        constants.UPDATE_RESOURCE_NAME: {
                            'hooks': 'pre-update'
                        }
                    }
                }
            }
        }
    })
    stack_args['environment'] = env
    stack_args['timeout_mins'] = self.timeout_mins
    stack_args['existing'] = 'true'

    LOG.info("Performing Heat stack update")
    LOG.info('updating stack: %s', stack.stack_name)
    return heat.stacks.update(stack.id, **stack_args)
def run(self):
    """Bump update identifiers via the mistral env, update the stack."""
    # Fetch the stack; error out if it does not exist.
    heat = self.get_orchestration_client()
    try:
        stack = heat.stacks.get(self.container)
    except heat_exc.HTTPNotFound:
        msg = "Error retrieving stack: %s" % self.container
        LOG.exception(msg)
        return mistral_workflow_utils.Result(error=msg)

    timestamp = int(time.time())
    parameters = {
        'DeployIdentifier': timestamp,
        'UpdateIdentifier': timestamp,
        'StackAction': 'UPDATE',
    }

    wc = self.get_workflow_client()
    try:
        wf_env = wc.environments.get(self.container)
    except Exception:
        msg = "Error retrieving mistral environment: %s" % self.container
        LOG.exception(msg)
        return mistral_workflow_utils.Result(error=msg)

    wf_env.variables.setdefault('parameter_defaults', {})
    wf_env.variables['parameter_defaults'].update(parameters)
    env_kwargs = {
        'name': wf_env.name,
        'variables': wf_env.variables,
    }
    # Store params changes back to db before processing templates.
    wc.environments.update(**env_kwargs)

    # Process all plan files and create or update a stack.
    processed_data = super(UpdateStackAction, self).run()
    # A 'Result' instance means the parent action hit an error.
    if isinstance(processed_data, mistral_workflow_utils.Result):
        return processed_data

    stack_args = processed_data.copy()
    env = stack_args.get('environment', {})
    # Arm pre-update breakpoints on every resource.
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {
                '*': {
                    '*': {
                        constants.UPDATE_RESOURCE_NAME: {
                            'hooks': 'pre-update'
                        }
                    }
                }
            }
        }
    })
    stack_args['environment'] = env
    stack_args['timeout_mins'] = self.timeout_mins
    stack_args['existing'] = 'true'

    LOG.info("Performing Heat stack update")
    LOG.info('updating stack: %s', stack.stack_name)
    return heat.stacks.update(stack.id, **stack_args)
def run(self, context):
    """Set UPDATE identifiers in the plan env and update the stack."""
    # Fetch the stack; error out if it does not exist.
    heat = self.get_orchestration_client(context)
    try:
        stack = heat.stacks.get(self.container)
    except heat_exc.HTTPNotFound:
        msg = "Error retrieving stack: %s" % self.container
        LOG.exception(msg)
        return actions.Result(error=msg)

    now = int(time.time())
    parameters = dict()
    parameters['DeployIdentifier'] = now
    parameters['UpdateIdentifier'] = now
    parameters['StackAction'] = 'UPDATE'

    swift = self.get_object_client(context)
    try:
        env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    try:
        plan_utils.update_in_env(swift, env, 'parameter_defaults',
                                 parameters)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error updating environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        return actions.Result(error=err_msg)

    # Process all plan files and create or update a stack.
    processed_data = super(UpdateStackAction, self).run(context)
    if isinstance(processed_data, actions.Result):
        # The parent action reported an error.
        return processed_data

    stack_args = processed_data.copy()
    env = stack_args.get('environment', {})
    # Arm pre-update breakpoints on every resource.
    pre_update_hook = {
        constants.UPDATE_RESOURCE_NAME: {'hooks': 'pre-update'}
    }
    template_utils.deep_update(env, {
        'resource_registry': {
            'resources': {'*': {'*': pre_update_hook}}
        }
    })
    stack_args['environment'] = env
    stack_args['timeout_mins'] = self.timeout_mins
    stack_args['existing'] = 'true'

    LOG.info("Performing Heat stack update")
    LOG.info('updating stack: %s', stack.stack_name)
    return heat.stacks.update(stack.id, **stack_args)
def process_multiple_environments(created_env_files, tht_root,
                                  user_tht_root, cleanup=True):
    """Process a list of environment files into merged files/env dicts.

    Environment paths under ``user_tht_root`` are redirected to the
    (possibly jinja-rendered) ``tht_root`` copy before processing. When
    heatclient fails to process an environment (typically because a
    resource_registry entry references a file that only exists in the
    rendered tree), the environment is loaded directly, its registry
    paths are rewritten to absolute paths under ``tht_root``, and a
    temporary rewritten copy is processed instead.

    :param created_env_files: iterable of environment file paths
    :param tht_root: rendered template tree the paths should point at
    :param user_tht_root: the user's original template tree
    :param cleanup: delete the temporary rewritten environments when
        done (pass False to keep them for debugging)
    :returns: tuple (env_files dict, merged environment dict)
    """
    log = logging.getLogger(__name__ + ".process_multiple_environments")
    env_files = {}
    localenv = {}
    # Normalize paths for full match checks
    user_tht_root = os.path.normpath(user_tht_root)
    tht_root = os.path.normpath(tht_root)
    for env_path in created_env_files:
        log.debug("Processing environment files %s" % env_path)
        abs_env_path = os.path.abspath(env_path)
        # Redirect any path under the user's tree to the rendered tree;
        # the '/' suffix checks avoid matching a sibling directory whose
        # name merely starts with user_tht_root.
        if (abs_env_path.startswith(user_tht_root) and
            ((user_tht_root + '/') in env_path or
             (user_tht_root + '/') in abs_env_path or
             user_tht_root == abs_env_path or
             user_tht_root == env_path)):
            new_env_path = env_path.replace(user_tht_root + '/',
                                            tht_root + '/')
            log.debug("Redirecting env file %s to %s"
                      % (abs_env_path, new_env_path))
            env_path = new_env_path
        try:
            files, env = template_utils.process_environment_and_files(
                env_path=env_path)
        except hc_exc.CommandError as ex:
            # This provides fallback logic so that we can reference files
            # inside the resource_registry values that may be rendered via
            # j2.yaml templates, where the above will fail because the
            # file doesn't exist in user_tht_root, but it is in tht_root
            # See bug https://bugs.launchpad.net/tripleo/+bug/1625783
            # for details on why this is needed (backwards-compatibility)
            log.debug("Error %s processing environment file %s"
                      % (six.text_type(ex), env_path))
            # Use the temporary path as it's possible the environment
            # itself was rendered via jinja.
            with open(env_path, 'r') as f:
                env_map = yaml.safe_load(f)
            env_registry = env_map.get('resource_registry', {})
            env_dirname = os.path.dirname(os.path.abspath(env_path))
            for rsrc, rsrc_path in six.iteritems(env_registry):
                # We need to calculate the absolute path relative to
                # env_path not cwd (which is what abspath uses).
                abs_rsrc_path = os.path.normpath(
                    os.path.join(env_dirname, rsrc_path))
                # If the absolute path matches user_tht_root, rewrite
                # a temporary environment pointing at tht_root instead
                if (abs_rsrc_path.startswith(user_tht_root) and
                    ((user_tht_root + '/') in abs_rsrc_path or
                     abs_rsrc_path == user_tht_root)):
                    new_rsrc_path = abs_rsrc_path.replace(
                        user_tht_root + '/', tht_root + '/')
                    log.debug("Rewriting %s %s path to %s"
                              % (env_path, rsrc, new_rsrc_path))
                    env_registry[rsrc] = new_rsrc_path
                else:
                    # Skip any resources that are mapping to OS::*
                    # resource names as these aren't paths
                    if not rsrc_path.startswith("OS::"):
                        env_registry[rsrc] = abs_rsrc_path
            env_map['resource_registry'] = env_registry
            f_name = os.path.basename(os.path.splitext(abs_env_path)[0])
            # Write the rewritten environment to a temp file inside
            # tht_root so relative references resolve there; delete=cleanup
            # keeps the file around when the caller wants to inspect it.
            with tempfile.NamedTemporaryFile(dir=tht_root,
                                             prefix="env-%s-" % f_name,
                                             suffix=".yaml",
                                             mode="w",
                                             delete=cleanup) as f:
                log.debug("Rewriting %s environment to %s"
                          % (env_path, f.name))
                f.write(yaml.safe_dump(env_map, default_flow_style=False))
                f.flush()
                files, env = template_utils.process_environment_and_files(
                    env_path=f.name)
        if files:
            log.debug("Adding files %s for %s" % (files, env_path))
            env_files.update(files)
        # 'env' can be a deeply nested dictionary, so a simple update is
        # not enough
        localenv = template_utils.deep_update(localenv, env)
    return env_files, localenv
def run(self, context):
    """Process the plan's templates and environments into Heat args.

    Renders custom roles (if an overcloud j2 template exists in the
    plan), merges passwords, derived parameters and user parameter
    defaults (user values win) into one environment, then lets
    heatclient's template_utils fetch and resolve the templates and
    environments directly from the Swift container.

    Returns an actions.Result with an error message on failure,
    otherwise a dict with 'stack_name', 'template', 'environment' and
    'files' suitable for a Heat stack create/update call.
    """
    error_text = None
    self.context = context
    swift = self.get_object_client(context)
    try:
        plan_env = plan_utils.get_env(swift, self.container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            self.container, err))
        LOG.exception(err_msg)
        # BUG FIX: previously returned Result(error=error_text), but
        # error_text is still None here, so callers received an empty
        # error. Return the message that was actually built and logged.
        return actions.Result(error=err_msg)

    try:
        # if the jinja overcloud template exists, process it and write it
        # back to the swift container before continuing processing. The
        # method called below should handle the case where the files are
        # not found in swift, but if they are found and an exception
        # occurs during processing, that exception will cause the
        # ProcessTemplatesAction to return an error result.
        self._process_custom_roles(context)
    except Exception as err:
        LOG.exception("Error occurred while processing custom roles.")
        return actions.Result(error=six.text_type(err))

    template_name = plan_env.get('template', "")
    environments = plan_env.get('environments', [])
    env_paths = []
    temp_files = []

    template_object = os.path.join(swift.url, self.container,
                                   template_name)
    LOG.debug('Template: %s' % template_name)
    LOG.debug('Environments: %s' % environments)
    try:
        for env in environments:
            if env.get('path'):
                env_paths.append(os.path.join(swift.url, self.container,
                                              env['path']))
            elif env.get('data'):
                # Inline environment data goes through a local temp file
                # so template_utils can consume it by path.
                env_temp_file = _create_temp_file(env['data'])
                temp_files.append(env_temp_file)
                env_paths.append(env_temp_file)

        # create a dict to hold all user set params and merge
        # them in the appropriate order
        merged_params = {}
        # merge generated passwords into params first
        passwords = plan_env.get('passwords', {})
        merged_params.update(passwords)
        # derived parameters are merged before 'parameter defaults'
        # so that user-specified values can override the derived values.
        derived_params = plan_env.get('derived_parameters', {})
        merged_params.update(derived_params)
        # handle user set parameter values next in case a user has set
        # a new value for a password parameter
        params = plan_env.get('parameter_defaults', {})
        merged_params = template_utils.deep_update(merged_params, params)
        if merged_params:
            env_temp_file = _create_temp_file(
                {'parameter_defaults': merged_params})
            temp_files.append(env_temp_file)
            env_paths.append(env_temp_file)

        registry = plan_env.get('resource_registry', {})
        if registry:
            env_temp_file = _create_temp_file(
                {'resource_registry': registry})
            temp_files.append(env_temp_file)
            env_paths.append(env_temp_file)

        def _env_path_is_object(env_path):
            # Anything under the Swift endpoint is fetched as an object;
            # everything else is a local temp file.
            retval = env_path.startswith(swift.url)
            LOG.debug('_env_path_is_object %s: %s' % (env_path, retval))
            return retval

        def _object_request(method, url, token=context.auth_token):
            # Fetch a Swift object directly, authenticating with the
            # caller's token.
            response = requests.request(
                method, url, headers={'X-Auth-Token': token})
            response.raise_for_status()
            return response.content

        template_files, template = template_utils.get_template_contents(
            template_object=template_object,
            object_request=_object_request)
        env_files, env = (
            template_utils.process_multiple_environments_and_files(
                env_paths=env_paths,
                env_path_is_object=_env_path_is_object,
                object_request=_object_request))
    except Exception as err:
        error_text = six.text_type(err)
        LOG.exception("Error occurred while processing plan files.")
    finally:
        # cleanup any local temp files
        for f in temp_files:
            os.remove(f)

    if error_text:
        return actions.Result(error=error_text)

    files = dict(list(template_files.items()) + list(env_files.items()))
    return {
        'stack_name': self.container,
        'template': template,
        'environment': env,
        'files': files
    }