def oscal_dict(self) -> Dict[str, Any]:
    """Return a dictionary including the root wrapping object key."""
    alias = classname_to_alias(self.__class__.__name__, AliasMode.JSON)
    serialized = self.dict(by_alias=True, exclude_none=True)
    # Unwrap pydantic's __root__ container so the alias maps directly to the payload.
    payload = serialized['__root__'] if '__root__' in serialized else serialized
    return {alias: payload}
def test_classname_to_alias() -> None:
    """Test conversion of class names to JSON ('-' separated) and FIELD ('_' separated) aliases."""
    module_name = catalog.Catalog.__module__
    short_classname = catalog.Catalog.__name__
    full_classname = f'{module_name}.{short_classname}'
    json_alias = str_utils.classname_to_alias(short_classname, AliasMode.JSON)
    assert json_alias == 'catalog'
    field_alias = str_utils.classname_to_alias(full_classname, AliasMode.FIELD)
    assert field_alias == 'catalog'

    # Fix: use the module each class is actually defined in — the original
    # reused catalog's module name when building full names for common.* classes.
    module_name = common.ResponsibleParty.__module__
    short_classname = common.ResponsibleParty.__name__
    full_classname = f'{module_name}.{short_classname}'
    json_alias = str_utils.classname_to_alias(short_classname, AliasMode.JSON)
    assert json_alias == 'responsible-party'
    field_alias = str_utils.classname_to_alias(full_classname, AliasMode.FIELD)
    assert field_alias == 'responsible_party'

    module_name = common.Property.__module__
    short_classname = common.Property.__name__
    full_classname = f'{module_name}.{short_classname}'
    json_alias = str_utils.classname_to_alias(short_classname, AliasMode.JSON)
    assert json_alias == 'property'
    field_alias = str_utils.classname_to_alias(full_classname, AliasMode.FIELD)
    assert field_alias == 'property'

    module_name = common.MemberOfOrganization.__module__
    short_classname = common.MemberOfOrganization.__name__
    full_classname = f'{module_name}.{short_classname}'
    json_alias = str_utils.classname_to_alias(short_classname, AliasMode.JSON)
    assert json_alias == 'member-of-organization'
    field_alias = str_utils.classname_to_alias(full_classname, AliasMode.FIELD)
    assert field_alias == 'member_of_organization'
def __init__(self, root_dir: pathlib.Path, model_type: Type[OscalBaseModel], name: str) -> None:
    """Initialize repository OSCAL model object.

    Args:
        root_dir: Root directory of a valid trestle project.
        model_type: Top level OSCAL model class being managed.
        name: Name of the model instance within the repository.

    Raises:
        TrestleError: If root_dir is not a valid trestle root, the model type is not
            top level, or the model directory/file does not exist on disk.
    """
    if not file_utils.is_valid_project_root(root_dir):
        raise TrestleError(f'Provided root directory {str(root_dir)} is not a valid Trestle root directory.')
    self._root_dir = root_dir
    self._model_type = model_type
    self._model_name = name

    # set model alias and dir
    self.model_alias = classname_to_alias(self._model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(self.model_alias) is None:
        raise TrestleError(f'Given model {self.model_alias} is not a top level model.')

    plural_path = ModelUtils.model_type_to_model_dir(self.model_alias)
    self.model_dir = self._root_dir / plural_path / self._model_name

    if not self.model_dir.exists() or not self.model_dir.is_dir():
        raise TrestleError(f'Model dir {self._model_name} does not exist.')

    # infer json/yaml content type from whichever model file exists in the model dir
    file_content_type = FileContentType.path_to_content_type(self.model_dir / self.model_alias)
    if file_content_type == FileContentType.UNKNOWN:
        raise TrestleError(f'Model file for model {self._model_name} does not exist.')
    self.file_content_type = file_content_type

    filepath = pathlib.Path(
        self.model_dir,
        self.model_alias + FileContentType.path_to_file_extension(self.model_dir / self.model_alias)
    )
    self.filepath = filepath
def write(self, model: OscalBaseModel) -> bool:
    """Write OSCAL model to repository."""
    logger.debug(f'Writing model {self._model_name}.')
    model_alias = classname_to_alias(model.__class__.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    # If the model was previously split, its pieces live under this directory.
    split_dir = self.model_dir / self.model_alias

    # Plan: drop any split remains, recreate the model file, then write it out.
    plan = Plan()
    plan.add_action(RemovePathAction(split_dir))
    plan.add_action(CreatePathAction(self.filepath, True))
    plan.add_action(WriteFileAction(self.filepath, Element(model), self.file_content_type))
    plan.execute()

    logger.debug(f'Model {self._model_name} written to repository')
    return True
def add_from_args(self, args: argparse.Namespace) -> int:
    """Parse args for add element to file."""
    file_path = pathlib.Path(args.file).resolve()

    # Load the parent model from file as a stripped model type.
    parent_model, _ = ModelUtils.get_stripped_model_type(file_path, args.trestle_root)
    parent_object = parent_model.oscal_read(file_path)
    parent_element = Element(parent_object, classname_to_alias(parent_model.__name__, AliasMode.JSON))

    add_plan = Plan()
    # Queue one update action per comma-separated element path.
    for raw_path in args.element.split(','):
        update_action, parent_element = self.add(ElementPath(raw_path), parent_element, args.include_optional_fields)
        add_plan.add_action(update_action)

    # Output format (json/yaml) follows the input file's extension.
    add_plan.add_action(CreatePathAction(file_path, True))
    add_plan.add_action(WriteFileAction(file_path, parent_element, FileContentType.to_content_type(file_path.suffix)))
    add_plan.execute()
    return CmdReturnCodes.SUCCESS.value
def __init__(self, elem: OscalBaseModel, wrapper_alias: str = ''):
    """Initialize an element wrapper.

    wrapper_alias is the OSCAL alias for the given elem object and used for
    serialization in the to_json() method.

    For example,
     - List[Catalog.Group] element should have wrapper alias 'groups'
     - Catalog element should have wrapper alias 'catalog'

    wrapper_alias is deduced for collection type objects.

    If wrapper_alias = IGNORE_WRAPPER_ALIAS, then it is ignored and assumed
    to be json-serializable during to_json().
    """
    # FIXME: There are instances where elem is a list.
    self._elem: OscalBaseModel = elem

    deduce_alias = wrapper_alias == '' and wrapper_alias != self.IGNORE_WRAPPER_ALIAS
    if deduce_alias:
        if utils.is_collection_field_type(elem):
            # Collections take the alias of their singular inner class.
            class_name = self._get_singular_classname()
            if class_name is None:
                raise TrestleError(
                    f'wrapper_alias not found for a collection type object: {elem.__class__.__name__}'
                )
        else:
            class_name = elem.__class__.__name__
        wrapper_alias = str_utils.classname_to_alias(class_name, AliasMode.JSON)
    self._wrapper_alias: str = wrapper_alias
def split_model( cls, model_obj: OscalBaseModel, element_paths: List[ElementPath], base_dir: pathlib.Path, content_type: FileContentType, root_file_name: str, aliases_to_strip: Dict[str, AliasTracker] ) -> Plan: """Split the model at the provided element paths. It returns a plan for the operation """ # initialize plan split_plan = Plan() # loop through the element path list and update the split_plan stripped_field_alias = [] cur_path_index = 0 while cur_path_index < len(element_paths): # extract the sub element name for each of the root path of the path chain element_path = element_paths[cur_path_index] if element_path.get_parent() is None and len(element_path.get()) > 1: stripped_part = element_path.get()[1] if stripped_part == ElementPath.WILDCARD: stripped_field_alias.append('__root__') else: if stripped_part not in stripped_field_alias: stripped_field_alias.append(stripped_part) # split model at the path chain cur_path_index = cls.split_model_at_path_chain( model_obj, element_paths, base_dir, content_type, cur_path_index, split_plan, False, root_file_name, aliases_to_strip ) cur_path_index += 1 # strip the root model object and add a WriteAction stripped_root = model_obj.stripped_instance(stripped_fields_aliases=stripped_field_alias) # If it's an empty model after stripping the fields, don't create path and don't write if set(model_obj.__fields__.keys()) == set(stripped_field_alias): return split_plan if root_file_name != '': root_file = base_dir / root_file_name else: root_file = base_dir / element_paths[0].to_root_path(content_type) split_plan.add_action(CreatePathAction(root_file, True)) wrapper_alias = classname_to_alias(stripped_root.__class__.__name__, AliasMode.JSON) split_plan.add_action(WriteFileAction(root_file, Element(stripped_root, wrapper_alias), content_type)) return split_plan
def delete_model(self, model_type: Type[OscalBaseModel], name: str) -> bool:
    """Delete an OSCAL model from repository."""
    logger.debug(f'Deleting model {name} of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    model_dir = self._root_dir / plural_path / name
    if not (model_dir.exists() and model_dir.is_dir()):
        raise TrestleError(f'Model {name} does not exist.')
    shutil.rmtree(model_dir)

    # remove model from dist directory if it exists
    dist_model_dir = self._root_dir / const.TRESTLE_DIST_DIR / plural_path
    content_type = FileContentType.path_to_content_type(dist_model_dir / name)
    if content_type != FileContentType.UNKNOWN:
        dist_file = dist_model_dir / (name + FileContentType.path_to_file_extension(dist_model_dir / name))
        logger.debug(f'Deleting model {name} from dist directory.')
        dist_file.unlink()

    logger.debug(f'Model {name} deleted successfully.')
    return True
def import_model(self, model: OscalBaseModel, name: str, content_type='json') -> ManagedOSCAL:
    """Import OSCAL object into trestle repository.

    Args:
        model: OSCAL model instance to import.
        name: Name under which the model is stored in the repository.
        content_type: File extension for the stored model file ('json' or 'yaml').

    Returns:
        ManagedOSCAL wrapper for the imported model.

    Raises:
        TrestleError: If the model is not top level, the target file already exists,
            or post-import validation fails (the import is rolled back first).
    """
    logger.debug(f'Importing model {name} of type {model.__class__.__name__}.')
    model_alias = classname_to_alias(model.__class__.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    # Work out output directory and file
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    desired_model_dir = self._root_dir / plural_path
    desired_model_path = desired_model_dir / name / (model_alias + '.' + content_type)
    desired_model_path = desired_model_path.resolve()

    if desired_model_path.exists():
        raise TrestleError(f'OSCAL file to be created here: {desired_model_path} exists.')

    # use a distinct name so the string parameter is not shadowed by the enum value
    file_content_type = FileContentType.to_content_type(desired_model_path.suffix)

    # Prepare actions
    top_element = Element(model)
    create_action = CreatePathAction(desired_model_path, True)
    write_action = WriteFileAction(desired_model_path, top_element, file_content_type)

    # create a plan to create the directory and imported file.
    import_plan = Plan()
    import_plan.add_action(create_action)
    import_plan.add_action(write_action)
    import_plan.execute()

    # Validate the imported file, rollback if unsuccessful
    success = False
    errmsg = ''
    try:
        success = self.validate_model(model.__class__, name)
        if not success:
            errmsg = f'Validation of model {name} did not pass'
            logger.error(errmsg)
    except Exception as err:
        # fix: build the message before logging it (previously an empty errmsg was logged)
        errmsg = f'Import of model {name} failed. Validation failed with error: {err}'
        logger.error(errmsg)

    if not success:
        # rollback in case of validation error or failure
        logger.debug(f'Rolling back import of model {name} to {desired_model_path}')
        try:
            import_plan.rollback()
        except TrestleError as err:
            logger.error(f'Failed to rollback: {err}. Remove {desired_model_path} to resolve state.')
        else:
            logger.debug(f'Successful rollback of import to {desired_model_path}')

        # raise trestle error
        raise TrestleError(errmsg)

    # all well; model was imported and validated successfully
    logger.debug(f'Model {name} of type {model.__class__.__name__} imported successfully.')
    return ManagedOSCAL(self._root_dir, model.__class__, name)
def list_models(self, model_type: Type[OscalBaseModel]) -> List[str]:
    """List models of a given type in trestle repository."""
    logger.debug(f'Listing models of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    # Only top level OSCAL models can be listed.
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')
    return ModelUtils.get_models_of_type(model_alias, self._root_dir)
def oscal_read(cls, path: pathlib.Path) -> Optional['OscalBaseModel']:
    """
    Read OSCAL objects.

    Handles the fact OSCAL wraps top level elements and also deals with both yaml and json.

    Args:
        path: The path of the oscal object to read.
    Returns:
        The oscal object read into trestle oscal models, or None if the path does not exist.
    """
    # Create the wrapper model.
    alias = classname_to_alias(cls.__name__, AliasMode.JSON)

    content_type = FileContentType.to_content_type(path.suffix)
    logger.debug(f'oscal_read content type {content_type} and alias {alias} from {path}')
    if not path.exists():
        logger.warning(f'path does not exist in oscal_read: {path}')
        return None

    obj: Dict[str, Any] = {}
    try:
        if content_type == FileContentType.YAML:
            yaml = YAML(typ='safe')
            # fix: context manager guarantees the handle is closed even if yaml.load raises
            with path.open('r', encoding=const.FILE_ENCODING) as fh:
                obj = yaml.load(fh)
        elif content_type == FileContentType.JSON:
            obj = load_file(
                path,
                json_loads=cls.__config__.json_loads,
            )
    except Exception as e:
        raise err.TrestleError(f'Error loading file {path} {str(e)}')
    try:
        # OSCAL files must consist of exactly one wrapping key (e.g. 'catalog')
        if not len(obj) == 1:
            raise err.TrestleError(
                f'Invalid OSCAL file structure, oscal file '
                f'does not have a single top level key wrapping it. It has {len(obj)} keys.'
            )
        parsed = cls.parse_obj(obj[alias])
    except KeyError:
        # fix: removed duplicated word ('key key') in the error message
        raise err.TrestleError(f'Provided oscal file does not have top level key: {alias}')
    except Exception as e:
        raise err.TrestleError(f'Error parsing file {path} {str(e)}')

    return parsed
def get_model(self, model_type: Type[OscalBaseModel], name: str) -> ManagedOSCAL:
    """Get a specific OSCAL model from repository."""
    logger.debug(f'Getting model {name} of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')
    # The model must already exist on disk before it can be managed.
    model_dir = self._root_dir / ModelUtils.model_type_to_model_dir(model_alias) / name
    if not (model_dir.exists() and model_dir.is_dir()):
        raise TrestleError(f'Model {name} does not exist.')
    return ManagedOSCAL(self._root_dir, model_type, name)
def prepare_sub_model_split_actions(
    cls,
    sub_model_item: OscalBaseModel,
    sub_model_dir: pathlib.Path,
    file_prefix: str,
    content_type: FileContentType
) -> List[Action]:
    """Create split actions of sub model."""
    file_name = cmd_utils.to_model_file_name(sub_model_item, file_prefix, content_type)
    model_alias = classname_to_alias(type(sub_model_item).__name__, AliasMode.JSON)
    sub_model_file = sub_model_dir / file_name
    # one action creates the path, the next writes the wrapped sub model into it
    return [
        CreatePathAction(sub_model_file),
        WriteFileAction(sub_model_file, Element(sub_model_item, model_alias), content_type)
    ]
def prepare_trestle_project_dir(
    repo_dir: pathlib.Path, content_type: FileContentType, model_obj: OscalBaseModel, models_dir_name: str
):
    """Prepare a temp directory with an example OSCAL model."""
    ensure_trestle_config_dir(repo_dir)

    alias = str_utils.classname_to_alias(model_obj.__class__.__name__, AliasMode.JSON)
    extension = FileContentType.to_file_extension(content_type)

    models_full_path = repo_dir / models_dir_name / 'my_test_model'
    models_full_path.mkdir(exist_ok=True, parents=True)

    # write the model into the freshly created directory
    model_def_file = models_full_path / f'{alias}{extension}'
    model_obj.oscal_write(model_def_file)

    return models_full_path, model_def_file
def validate_model(self, model_type: Type[OscalBaseModel], name: str) -> bool:
    """Validate an OSCAL model in repository."""
    logger.debug(f'Validating model {name} of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    # delegate to the validate command, mirroring the CLI argument shape
    args = argparse.Namespace(
        type=model_alias,
        name=name,
        trestle_root=self._root_dir,
        verbose=log.get_current_verbosity_level(logger)
    )
    try:
        success = validatecmd.ValidateCmd()._run(args) == 0
    except Exception as e:
        raise TrestleError(f'Error in validating model: {e}')

    logger.debug(f'Model {name} validated successfully.')
    return success
def assemble_model(self, model_type: Type[OscalBaseModel], name: str, extension='json') -> bool:
    """Assemble an OSCAL model in repository and publish it to 'dist' directory."""
    logger.debug(f'Assembling model {name} of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    # delegate to the assemble command, mirroring the CLI argument shape
    args = argparse.Namespace(
        type=model_alias,
        name=name,
        extension=extension,
        trestle_root=self._root_dir,
        verbose=log.get_current_verbosity_level(logger)
    )
    try:
        success = assemblecmd.AssembleCmd().assemble_model(model_alias, args) == 0
    except Exception as e:
        raise TrestleError(f'Error in assembling model: {e}')

    logger.debug(f'Model {name} assembled successfully.')
    return success
def to_model_file_name(model_obj: OscalBaseModel, file_prefix: str, content_type: FileContentType) -> str:
    """Return the file name for the item."""
    extension = FileContentType.to_file_extension(content_type)
    alias = classname_to_alias(type(model_obj).__name__, AliasMode.JSON)
    # e.g. '00000__group.json' for prefix '00000' and alias 'group'
    return f'{file_prefix}{const.IDX_SEP}{alias}{extension}'
def get_singular_alias(alias_path: str, relative_path: Optional[pathlib.Path] = None) -> str:
    """
    Get the alias in the singular form from a jsonpath.

    If contextual_mode is True and contextual_path is None, it assumes alias_path
    is relative to the directory the user is running trestle from.

    Args:
        alias_path: The current alias element path as a string
        relative_path: Optional relative path (w.r.t. trestle_root) to cater for relative element paths.
    Returns:
        Alias as a string
    """
    if len(alias_path.strip()) == 0:
        raise err.TrestleError(f'Invalid jsonpath {alias_path}')

    singular_alias: str = ''
    full_alias_path = alias_path
    if relative_path:
        # fix: log the actual relative path (previously logged the builtin `str` type)
        logger.debug(f'get_singular_alias contextual mode: {relative_path}')
        _, full_model_alias = ModelUtils.get_relative_model_type(relative_path)
        first_alias_a = full_model_alias.split('.')[-1]
        first_alias_b = alias_path.split('.')[0]
        # avoid duplicating the overlapping alias at the join point
        if first_alias_a == first_alias_b:
            full_model_alias = '.'.join(full_model_alias.split('.')[:-1])
        full_alias_path = '.'.join([full_model_alias, alias_path]).strip('.')

    path_parts = full_alias_path.split(const.ALIAS_PATH_SEPARATOR)
    logger.debug(f'path parts: {path_parts}')

    model_types = []

    root_model_alias = path_parts[0]
    found = False
    for module_name in const.MODEL_TYPE_TO_MODEL_MODULE.values():
        model_type, model_alias = ModelUtils.get_root_model(module_name)
        if root_model_alias == model_alias:
            found = True
            model_types.append(model_type)
            break

    if not found:
        raise err.TrestleError(f'{root_model_alias} is an invalid root model alias.')

    if len(path_parts) == 1:
        return root_model_alias

    model_type = model_types[0]
    # go through path parts skipping first one
    for i in range(1, len(path_parts)):
        if utils.is_collection_field_type(model_type):
            # if it is a collection type and last part is * then break
            if i == len(path_parts) - 1 and path_parts[i] == '*':
                break
            # otherwise get the inner type of items in the collection
            model_type = utils.get_inner_type(model_type)
            # NOTE(review): rebinding a for-loop index has no effect in Python, so this
            # does NOT skip the next path part — confirm whether a skip was intended
            i = i + 1
        else:
            path_part = path_parts[i]
            field_map = model_type.alias_to_field_map()
            if path_part not in field_map:
                continue
            field = field_map[path_part]
            model_type = field.outer_type_
        model_types.append(model_type)

    last_alias = path_parts[-1]
    if last_alias == '*':
        last_alias = path_parts[-2]

    # generic model and not list, so return itself fixme doc
    if not utils.is_collection_field_type(model_type):
        return last_alias

    parent_model_type = model_types[-2]
    try:
        field_map = parent_model_type.alias_to_field_map()
        field = field_map[last_alias]
        outer_type = field.outer_type_
        inner_type = utils.get_inner_type(outer_type)
        inner_type_name = inner_type.__name__
        singular_alias = str_utils.classname_to_alias(inner_type_name, AliasMode.JSON)
    except Exception as e:
        raise err.TrestleError(f'Error in json path {alias_path}: {e}')

    return singular_alias
def split_model_at_path_chain(
    cls,
    model_obj: OscalBaseModel,
    element_paths: List[ElementPath],
    base_dir: pathlib.Path,
    content_type: FileContentType,
    cur_path_index: int,
    split_plan: Plan,
    strip_root: bool,
    root_file_name: str,
    aliases_to_strip: Dict[str, AliasTracker],
    last_one: bool = True
) -> int:
    """Recursively split the model at the provided chain of element paths.

    It assumes that a chain of element paths starts at the cur_path_index with the
    first path ending with a wildcard (*).

    If the wildcard follows an element that is inherently a list of items, the list
    of items is extracted. But if the wildcard follows a generic model then members
    of that model class found in the model will be split off. But only the
    non-trivial elements are removed, i.e. not str, int, datetime, etc.

    Args:
        model_obj: The OscalBaseModel to be split
        element_paths: The List[ElementPath] of elements to split, including embedded wildcards
        base_dir: pathlib.Path of the file being split
        content_type: json or yaml files
        cur_path_index: Index into the list of element paths for the current split operation
        split_plan: The accumulated plan of actions needed to perform the split
        strip_root: Whether to strip elements from the root object
        root_file_name: Filename of root file that gets split into a list of items
        aliases_to_strip: AliasTracker previously loaded with aliases that need to be split from each element
        last_one: bool indicating last item in array has been split and stripped model can now be written

    Returns:
        int representing the index where the chain of the path ends.

    Examples:
        Element paths could have a list of paths as below for a `ComponentDefinition`
        model where the first path is the start of the chain.

        For each of the sub models described by the first element path
        (e.g. component-definition.components.*) in the chain, the subsequent paths
        (e.g. component.control-implementations.*) will be applied recursively to
        retrieve the sub-sub models:
        [
            'component-definition.component.*',
            'component.control-implementations.*'
        ]
        for a command like below:
        trestle split -f component.yaml -e component-definition.components.*.control-implementations.*
    """
    if split_plan is None:
        raise TrestleError('Split plan must have been initialized')
    if cur_path_index < 0:
        raise TrestleError('Current index of the chain of paths cannot be less than 0')

    # if there are no more element_paths, return the current plan
    if cur_path_index >= len(element_paths):
        return cur_path_index

    # initialize local variables
    element = Element(model_obj)
    stripped_field_alias: List[str] = []

    # get the sub_model specified by the element_path of this round
    element_path = element_paths[cur_path_index]

    # does the next element_path point back at me
    is_parent = cur_path_index + 1 < len(element_paths) and element_paths[cur_path_index
                                                                          + 1].get_parent() == element_path

    # root dir name for sub models dir
    # 00000__group.json will have the root_dir name as 00000__group for sub models of group
    # catalog.json will have the root_dir name as catalog
    root_dir = ''
    if root_file_name != '':
        root_dir = str(pathlib.Path(root_file_name).with_suffix(''))

    sub_models = element.get_at(element_path, False)  # we call sub_models as in plural, but it can be just one

    # assume cur_path_index is the end of the chain
    # value of this variable may change during recursive split of the sub-models below
    path_chain_end = cur_path_index

    # if wildcard is present in the element_path and the next path in the chain has current path as the parent,
    # Then deal with case of list, or split of arbitrary oscalbasemodel
    # fix: compare strings with != rather than identity (`is not`), which relied on interning
    if is_parent and element_path.get_last() != ElementPath.WILDCARD:
        # create dir for all sub model items
        sub_models_dir = base_dir / element_path.to_root_path()
        sub_model_plan = Plan()
        path_chain_end = cls.split_model_at_path_chain(
            sub_models,
            element_paths,
            sub_models_dir,
            content_type,
            cur_path_index + 1,
            sub_model_plan,
            True,
            '',
            aliases_to_strip
        )
        sub_model_actions = sub_model_plan.get_actions()
        split_plan.add_actions(sub_model_actions)
    elif element_path.get_last() == ElementPath.WILDCARD:
        # extract sub-models into a dict with appropriate prefix
        sub_model_items: Dict[str, OscalBaseModel] = {}
        sub_models_dir = base_dir / element_path.to_file_path(root_dir=root_dir)
        if isinstance(sub_models, list):
            for i, sub_model_item in enumerate(sub_models):
                # e.g. `groups/00000_groups/`
                prefix = str(i).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
                sub_model_items[prefix] = sub_model_item

        # process list sub model items
        count = 0
        for key, sub_model_item in sub_model_items.items():
            count += 1
            # recursively split the sub-model if there are more element paths to traverse
            # e.g. split component.control-implementations.*
            require_recursive_split = cur_path_index + 1 < len(element_paths) and element_paths[
                cur_path_index + 1].get_parent() == element_path

            if require_recursive_split:
                # prepare individual directory for each sub-model
                sub_root_file_name = cmd_utils.to_model_file_name(sub_model_item, key, content_type)
                sub_model_plan = Plan()

                # NOTE(review): this intentionally rebinds the `last_one` parameter for the recursive call
                last_one: bool = count == len(sub_model_items)
                path_chain_end = cls.split_model_at_path_chain(
                    sub_model_item,
                    element_paths,
                    sub_models_dir,
                    content_type,
                    cur_path_index + 1,
                    sub_model_plan,
                    True,
                    sub_root_file_name,
                    aliases_to_strip,
                    last_one
                )
                sub_model_actions = sub_model_plan.get_actions()
            else:
                sub_model_actions = cls.prepare_sub_model_split_actions(
                    sub_model_item, sub_models_dir, key, content_type
                )

            split_plan.add_actions(sub_model_actions)
    else:
        # the chain of path ends at the current index.
        # so no recursive call. Let's just write the sub model to the file and get out
        if sub_models is not None:
            sub_model_file = base_dir / element_path.to_file_path(content_type, root_dir=root_dir)
            split_plan.add_action(CreatePathAction(sub_model_file))
            split_plan.add_action(
                WriteFileAction(sub_model_file, Element(sub_models, element_path.get_element_name()), content_type)
            )

    # Strip the root model object and add a WriteAction for the updated model object in the plan
    if strip_root:
        full_path = element_path.get_full()
        path = '.'.join(full_path.split('.')[:-1])
        aliases = [element_path.get_element_name()]
        need_to_write = True
        use_alias_dict = aliases_to_strip is not None and path in aliases_to_strip
        if use_alias_dict:
            aliases = aliases_to_strip[path].get_aliases()
            need_to_write = aliases_to_strip[path].needs_writing()

        stripped_model = model_obj.stripped_instance(stripped_fields_aliases=aliases)
        # can mark it written even if it doesn't need writing since it is empty
        # but if an array only mark it written if it's the last one
        if last_one and use_alias_dict:
            aliases_to_strip[path].mark_written()
        # If it's an empty model after stripping the fields, don't create path and don't write
        # NOTE(review): stripped_field_alias is always empty here, so this only triggers for an
        # empty __fields__ map — confirm whether getattr-based emptiness was intended
        field_list = [x for x in model_obj.__fields__.keys() if model_obj.__fields__[x] is not None]
        if set(field_list) == set(stripped_field_alias):
            return path_chain_end

        if need_to_write:
            if root_file_name != '':
                root_file = base_dir / root_file_name
            else:
                root_file = base_dir / element_path.to_root_path(content_type)

            split_plan.add_action(CreatePathAction(root_file))
            wrapper_alias = classname_to_alias(stripped_model.__class__.__name__, AliasMode.JSON)
            split_plan.add_action(WriteFileAction(root_file, Element(stripped_model, wrapper_alias), content_type))

    # return the end of the current path chain
    return path_chain_end
def get_type(self, root_model: Optional[Type[Any]] = None, use_parent: bool = False) -> Type[Any]:
    """Get the type of an element.

    If possible the model type will be derived from one of the top level models,
    otherwise a 'root model' can be passed for situations where this is not possible.

    This type path should *NOT* have wild cards in it. It *may* have indices.

    Valid Examples:
        catalog.metadata
        catalog.groups
        catalog.groups.group
        catalog
        catalog.groups.0

    Args:
        root_model: An OscalBaseModel Type from which to base the approach on.
        use_parent: Whether or not to normalise the full path across parent ElementPaths, default to not.

    Returns:
        The type of the model whether or not it is an OscalBaseModel or not.
    """
    effective_path: List[str]
    if use_parent:
        effective_path = self.get_full_path_parts()
    else:
        effective_path = self._path

    if not root_model:
        # lookup root model from top level oscal models or fail
        prev_model = self._top_level_type_lookup(effective_path[0])
    else:
        prev_model = root_model

    if len(effective_path) == 1:
        return prev_model

    # walk the remaining path parts, narrowing the model type at each step
    # (fix: removed dead commented-out duplicate of the loop header)
    for current_element_str in effective_path[1:]:
        # Determine if the parent model is a collection.
        if utils.is_collection_field_type(prev_model):
            inner_model = utils.get_inner_type(prev_model)
            inner_class_name = classname_to_alias(inner_model.__name__, AliasMode.JSON)
            # Assert that the current name fits an expected form.
            # Valid choices here are *, integer (for arrays) and the inner model alias
            if (inner_class_name == current_element_str or current_element_str == self.WILDCARD
                    or current_element_str.isnumeric()):
                prev_model = inner_model
            else:
                raise TrestleError('Unexpected key in element path when finding type.')
        else:
            # Indices, * are not allowed on non-collection types
            if current_element_str == self.WILDCARD:
                raise TrestleError(
                    'Wild card in unexpected position when trying to find class type.'
                    + ' Element path type lookup can only occur where a single type can be identified.'
                )
            prev_model = prev_model.alias_to_field_map()[current_element_str].outer_type_
    return prev_model
def generate_sample_model(
    model: Union[Type[TG], List[TG], Dict[str, TG]], include_optional: bool = False, depth: int = -1
) -> TG:
    """Given a model class, generate an object of that class with sample values.

    Can generate optional variables with an enabled flag. Any array objects will have a single
    entry injected into it.

    Note: Trestle generate will not activate recursive loops irrespective of the depth flag.

    Args:
        model: The model type provided. Typically for a user as an OscalBaseModel Subclass.
        include_optional: Whether or not to generate optional fields.
        depth: Depth of the tree at which optional fields are generated. Negative values
            (default) removes the limit.

    Returns:
        The generated instance with pro-forma values filled out as best as possible.
    """
    # optional fields are only generated while within the requested depth
    effective_optional = include_optional and not depth == 0

    model_type = model
    # normalize a collection type down to its origin (list/dict) and the inner model class
    if utils.is_collection_field_type(model):  # type: ignore
        model_type = utils.get_origin(model)  # type: ignore
        model = utils.get_inner_type(model)  # type: ignore
    model = cast(TG, model)

    model_dict = {}
    # this block is needed to avoid situations where an inbuilt is inside a list / dict.
    # the only time dict ever appears is with include_all, which is handled specially
    # the only type of collection possible after OSCAL 1.0.0 is list
    if safe_is_sub(model, OscalBaseModel):
        for field in model.__fields__:
            if field == 'include_all':
                # include_all is serialized as an empty object when optionals are requested
                if include_optional:
                    model_dict[field] = {}
                continue
            outer_type = model.__fields__[field].outer_type_
            # next appears to be needed for python 3.7
            if utils.get_origin(outer_type) == Union:
                outer_type = outer_type.__args__[0]
            if model.__fields__[field].required or effective_optional:
                # FIXME could be ForwardRef('SystemComponentStatus')
                if utils.is_collection_field_type(outer_type):
                    inner_type = utils.get_inner_type(outer_type)
                    # avoid direct self-recursion: a field whose inner type is the model itself is skipped
                    if inner_type == model:
                        continue
                    model_dict[field] = generate_sample_model(
                        outer_type, include_optional=include_optional, depth=depth - 1
                    )
                elif safe_is_sub(outer_type, OscalBaseModel):
                    model_dict[field] = generate_sample_model(
                        outer_type, include_optional=include_optional, depth=depth - 1
                    )
                else:
                    # Hacking here:
                    # Root models should ideally not exist, however, sometimes we are stuck with them.
                    # If that is the case we need sufficient information on the type in order to generate a model.
                    # E.g. we need the type of the container.
                    if field == '__root__' and hasattr(model, '__name__'):
                        model_dict[field] = generate_sample_value_by_type(
                            outer_type, str_utils.classname_to_alias(model.__name__, AliasMode.FIELD)
                        )
                    else:
                        model_dict[field] = generate_sample_value_by_type(outer_type, field)
    # Note: this assumes list constrains in oscal are always 1 as a minimum size. if two this may still fail.
    else:
        # inner type is an inbuilt (str, int, ...) — generate a single sample value per container kind
        if model_type is list:
            return [generate_sample_value_by_type(model, '')]
        if model_type is dict:
            return {'REPLACE_ME': generate_sample_value_by_type(model, '')}
        raise err.TrestleError('Unhandled collection type.')

    # wrap the constructed model in the original container shape
    if model_type is list:
        return [model(**model_dict)]
    if model_type is dict:
        return {'REPLACE_ME': model(**model_dict)}
    return model(**model_dict)
def test_subsequent_split_model_plans(
    tmp_path: pathlib.Path, sample_nist_component_def: component.ComponentDefinition, keep_cwd: pathlib.Path
) -> None:
    """Test subsequent split of sub models."""
    # Scenario: the user first runs
    #   trestle split -f component-definition.yaml -e component-definition.metadata
    # and then splits the resulting metadata file again at its parties.
    fmt = FileContentType.YAML

    # set up a trestle project directory containing the component definition file
    comp_dir, comp_file = test_utils.prepare_trestle_project_dir(
        tmp_path, fmt, sample_nist_component_def, test_utils.COMPONENT_DEF_DIR
    )

    # perform the initial split of the component definition at its metadata
    comp_def = component.ComponentDefinition.oscal_read(comp_file)
    elem = Element(comp_def, 'component-definition')
    paths = cmd_utils.parse_element_args(None, ['component-definition.metadata'], comp_dir.relative_to(tmp_path))
    metadata_path = comp_dir / paths[0].to_file_path(fmt)
    metadata: common.Metadata = elem.get_at(paths[0])
    root_path = comp_dir / paths[0].to_root_path(fmt)
    metadata_alias = paths[0].get_element_name()
    stripped_root = elem.get().stripped_instance(stripped_fields_aliases=[metadata_alias])
    root_alias = str_utils.classname_to_alias(stripped_root.__class__.__name__, AliasMode.JSON)

    initial_plan = Plan()
    initial_plan.add_action(CreatePathAction(metadata_path))
    initial_plan.add_action(WriteFileAction(metadata_path, Element(metadata, metadata_alias), fmt))
    initial_plan.add_action(CreatePathAction(root_path, True))
    initial_plan.add_action(WriteFileAction(root_path, Element(stripped_root, root_alias), fmt))
    # executing the plan writes the split files into the temp directory
    initial_plan.execute()

    # build the plan we expect the second split (metadata at parties) to produce
    expected_plan = Plan()
    metadata_dir = comp_dir / paths[0].to_root_path()
    metadata2 = common.Metadata.oscal_read(metadata_path)
    elem = Element(metadata2, metadata_alias)

    paths = cmd_utils.parse_element_args(None, ['metadata.parties.*'], comp_dir.relative_to(tmp_path))
    parties_dir = metadata_dir / paths[0].to_file_path()
    for idx, party in enumerate(elem.get_at(paths[0])):
        digits = str(idx).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
        expected_plan.add_actions(SplitCmd.prepare_sub_model_split_actions(party, parties_dir, digits, fmt))

    # the metadata left behind has its parties stripped out
    remaining_metadata = metadata2.stripped_instance(stripped_fields_aliases=['parties'])
    expected_plan.add_action(CreatePathAction(metadata_path, True))
    expected_plan.add_action(WriteFileAction(metadata_path, Element(remaining_metadata, metadata_alias), fmt))

    # run the split command itself and check it builds the same plan
    split_plan = SplitCmd.split_model(metadata, paths, metadata_dir, fmt, '', None)
    assert expected_plan == split_plan
def test_split_multi_level_dict_plans(
    tmp_path: pathlib.Path, sample_nist_component_def: component.ComponentDefinition, keep_cwd: pathlib.Path
) -> None:
    """Test split_model with a multi-level wildcard path, checking the generated plan action by action."""
    # Simulates a command like:
    # trestle split -f component-definition.yaml -e component-definition.components.*.control-implementations.*
    content_type = FileContentType.YAML

    # prepare trestle project dir with the component definition file
    component_def_dir, component_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path, content_type, sample_nist_component_def, test_utils.COMPONENT_DEF_DIR
    )

    file_ext = FileContentType.to_file_extension(content_type)

    # read the model from file
    component_def: component.ComponentDefinition = component.ComponentDefinition.oscal_read(component_def_file)
    element = Element(component_def)
    element_args = ['component-definition.components.*.control-implementations.*']
    element_paths = cmd_utils.parse_element_args(None, element_args, component_def_dir.relative_to(tmp_path))

    expected_plan = Plan()

    # extract the list of components addressed by the first (outer) wildcard path
    components: list = element.get_at(element_paths[0])
    components_dir = component_def_dir / element_paths[0].to_file_path()

    # build the expected actions for every component
    for index, comp_obj in enumerate(components):
        # each component gets its own numbered directory and file
        component_element = Element(comp_obj)
        model_type = str_utils.classname_to_alias(type(comp_obj).__name__, AliasMode.JSON)
        dir_prefix = str(index).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
        component_dir_name = f'{dir_prefix}{const.IDX_SEP}{model_type}'
        component_file = components_dir / f'{component_dir_name}{file_ext}'

        # control implementations are split under this component's directory (inner wildcard)
        component_ctrl_impls: list = component_element.get_at(element_paths[1])
        component_ctrl_dir = components_dir / element_paths[1].to_file_path(root_dir=component_dir_name)

        for i, component_ctrl_impl in enumerate(component_ctrl_impls):
            model_type = str_utils.classname_to_alias(type(component_ctrl_impl).__name__, AliasMode.JSON)
            file_prefix = str(i).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
            file_name = f'{file_prefix}{const.IDX_SEP}{model_type}{file_ext}'
            file_path = component_ctrl_dir / file_name
            expected_plan.add_action(CreatePathAction(file_path))
            expected_plan.add_action(WriteFileAction(file_path, Element(component_ctrl_impl), content_type))

        # the component file itself is written with its control implementations stripped out
        stripped_target = comp_obj.stripped_instance(stripped_fields_aliases=[element_paths[1].get_element_name()])
        expected_plan.add_action(CreatePathAction(component_file))
        expected_plan.add_action(WriteFileAction(component_file, Element(stripped_target), content_type))

    # the root file is rewritten with the components stripped out
    root_file = component_def_dir / f'component-definition{file_ext}'
    remaining_root = element.get().stripped_instance(stripped_fields_aliases=[element_paths[0].get_element_name()])
    expected_plan.add_action(CreatePathAction(root_file, True))
    expected_plan.add_action(WriteFileAction(root_file, Element(remaining_root), content_type))

    # the plan produced by the split command must match the hand-built one exactly
    split_plan = SplitCmd.split_model(component_def, element_paths, component_def_dir, content_type, '', None)
    assert expected_plan == split_plan