def oscal_write(self, path: pathlib.Path) -> None:
    """
    Write out a pydantic data model in an oscal friendly way.

    OSCAL schema mandates that top level elements are wrapped in a singular
    json/yaml field. This function handles both json and yaml output as well as
    formatting of the json.

    Args:
        path: The output file location for the oscal object.

    Raises:
        err.TrestleError: If a unknown file extension is provided.
    """
    content_type = FileContentType.to_content_type(path.suffix)
    # The output will have \r\n newlines on windows and \n newlines elsewhere
    if content_type == FileContentType.YAML:
        # Context manager guarantees the handle is closed even if load/dump raises;
        # the original leaked the handle on any serialization error.
        with pathlib.Path(path).open('w', encoding=const.FILE_ENCODING) as write_file:
            yaml = YAML(typ='safe')
            yaml.dump(yaml.load(self.oscal_serialize_json()), write_file)
            # Flush required (by experience) due to flushing issues in tests.
            write_file.flush()
    elif content_type == FileContentType.JSON:
        with pathlib.Path(path).open('wb') as write_file:
            write_file.write(self.oscal_serialize_json_bytes(pretty=True))
            # Flush required (by experience) due to flushing issues in tests.
            write_file.flush()
def add_from_args(self, args: argparse.Namespace) -> int:
    """Parse args for add element to file."""
    model_file = pathlib.Path(args.file).resolve()

    # Load the parent model from the file and wrap it in an Element.
    model_type, _ = ModelUtils.get_stripped_model_type(model_file, args.trestle_root)
    model_object = model_type.oscal_read(model_file)
    element = Element(model_object, classname_to_alias(model_type.__name__, AliasMode.JSON))

    plan = Plan()

    # Run add once per comma-separated element path, threading the updated element through.
    for raw_path in args.element.split(','):
        update_action, element = self.add(ElementPath(raw_path), element, args.include_optional_fields)
        plan.add_action(update_action)

    plan.add_action(CreatePathAction(model_file, True))
    # this will output json or yaml based on type of input file
    plan.add_action(WriteFileAction(model_file, element, FileContentType.to_content_type(model_file.suffix)))

    plan.execute()
    return CmdReturnCodes.SUCCESS.value
def assemble_model(cls, model_alias: str, args: argparse.Namespace) -> int:
    """Assemble a top level OSCAL model within the trestle dist directory.

    Args:
        model_alias: Alias of the model type to assemble (e.g. 'catalog').
        args: Parsed CLI arguments; reads trestle_root, name and extension.

    Returns:
        CmdReturnCodes.SUCCESS.value on success (including when no models are found).

    Raises:
        TrestleRootError: If trestle_root is not a valid trestle project root.
        TrestleError: If a model directory has no top level model file.
    """
    log.set_log_level_from_args(args)
    logger.info(f'Assembling models of type {model_alias}.')

    trestle_root = args.trestle_root  # trestle root is set via command line in args. Default is cwd.
    if not trestle_root or not file_utils.is_valid_project_root(args.trestle_root):
        raise TrestleRootError(f'Given directory {trestle_root} is not a trestle project.')

    model_names = []
    if args.name:
        # A specific model name was given: assemble only that one.
        model_names = [args.name]
        logger.info(f'Assembling single model of type {model_alias}: {args.name}.')
    else:
        # No name given: assemble every model of this type in the project.
        model_names = ModelUtils.get_models_of_type(model_alias, trestle_root)
        nmodels = len(model_names)
        logger.info(f'Assembling {nmodels} found models of type {model_alias}.')
    if len(model_names) == 0:
        logger.info(f'No models found to assemble of type {model_alias}.')
        return CmdReturnCodes.SUCCESS.value

    for model_name in model_names:
        # contruct path to the model file name
        root_model_dir = trestle_root / ModelUtils.model_type_to_model_dir(model_alias)
        # Infer json/yaml from whatever files already exist in the model directory.
        model_file_type = file_utils.get_contextual_file_type(root_model_dir / model_name)
        model_file_name = f'{model_alias}{FileContentType.to_file_extension(model_file_type)}'
        root_model_filepath = root_model_dir / model_name / model_file_name

        if not root_model_filepath.exists():
            raise TrestleError(f'No top level model file at {root_model_dir}')

        # distributed load: recursively merges any split/decomposed parts
        _, _, assembled_model = ModelUtils.load_distributed(root_model_filepath, args.trestle_root)
        plural_alias = ModelUtils.model_type_to_model_dir(model_alias)

        assembled_model_dir = trestle_root / const.TRESTLE_DIST_DIR / plural_alias

        assembled_model_filepath = assembled_model_dir / f'{model_name}.{args.extension}'

        # Plan: create (or clear) the dist path, then write the assembled model.
        plan = Plan()
        plan.add_action(CreatePathAction(assembled_model_filepath, True))
        plan.add_action(
            WriteFileAction(
                assembled_model_filepath,
                Element(assembled_model),
                FileContentType.to_content_type(f'.{args.extension}')
            )
        )

        plan.execute()

    return CmdReturnCodes.SUCCESS.value
def to_file_path(self, content_type: FileContentType = None, root_dir: str = '') -> pathlib.Path:
    """Convert to a file or directory path for the element path.

    if content_type is not passed, it will return a path for directory
    """
    parts = self.get()

    # Trailing wildcard does not contribute a path component.
    if parts[-1] == ElementPath.WILDCARD:
        parts = parts[:-1]

    # An explicit root dir overrides the first path component.
    if root_dir != '':
        parts[0] = root_dir

    path_str = '/'.join(parts)

    # A content type means this is a file path, so append its extension;
    # without one the result is a directory path.
    if content_type is not None:
        path_str += FileContentType.to_file_extension(content_type)

    return pathlib.Path(f'./{path_str}')
def test_target_dups(tmp_dir):
    """Test model validation."""
    content_type = FileContentType.YAML
    models_dir_name = test_utils.TARGET_DEFS_DIR
    model_ref = ostarget.TargetDefinition

    test_utils.ensure_trestle_config_dir(tmp_dir)

    # Build the expected model file location inside the temp trestle project.
    file_ext = FileContentType.to_file_extension(content_type)
    models_full_path = tmp_dir / models_dir_name / 'my_test_model'
    model_alias = utils.classname_to_alias(model_ref.__name__, 'json')
    model_def_file = models_full_path / f'{model_alias}{file_ext}'
    fs.ensure_directory(models_full_path)
    shutil.copyfile('tests/data/yaml/good_target.yaml', model_def_file)

    # A clean model validates: cli exits via SystemExit with code None.
    testcmd = f'trestle validate -f {model_def_file} -m duplicates -i uuid'
    with patch.object(sys, 'argv', testcmd.split()):
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            cli.run()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code is None

    # The same validation against a model with duplicate uuids must raise
    # TrestleValidationError instead of exiting cleanly.
    shutil.copyfile('tests/data/yaml/bad_target_dup_uuid.yaml', model_def_file)
    testcmd = f'trestle validate -f {model_def_file} -m duplicates -i uuid'
    with patch.object(sys, 'argv', testcmd.split()):
        with pytest.raises(TrestleValidationError) as pytest_wrapped_e:
            cli.run()
    assert pytest_wrapped_e.type == TrestleValidationError
def import_model(self, model: OscalBaseModel, name: str, content_type='json') -> ManagedOSCAL:
    """Import OSCAL object into trestle repository.

    Args:
        model: The OSCAL model instance to import.
        name: Name under which the model is stored in the repository.
        content_type: Desired file extension of the imported file ('json' or 'yaml').

    Returns:
        A ManagedOSCAL wrapper for the newly imported model.

    Raises:
        TrestleError: If the model is not a top level model, the destination
            file already exists, or validation of the imported model fails.
    """
    logger.debug(f'Importing model {name} of type {model.__class__.__name__}.')
    model_alias = classname_to_alias(model.__class__.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')

    # Work out output directory and file
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    desired_model_dir = self._root_dir / plural_path
    desired_model_path = desired_model_dir / name / (model_alias + '.' + content_type)
    desired_model_path = desired_model_path.resolve()

    if desired_model_path.exists():
        raise TrestleError(f'OSCAL file to be created here: {desired_model_path} exists.')

    content_type = FileContentType.to_content_type(pathlib.Path(desired_model_path).suffix)

    # Prepare actions
    top_element = Element(model)
    create_action = CreatePathAction(desired_model_path, True)
    write_action = WriteFileAction(desired_model_path, top_element, content_type)

    # create a plan to create the directory and imported file.
    import_plan = Plan()
    import_plan.add_action(create_action)
    import_plan.add_action(write_action)
    import_plan.execute()

    # Validate the imported file, rollback if unsuccessful
    success = False
    errmsg = ''
    try:
        success = self.validate_model(model.__class__, name)
        if not success:
            errmsg = f'Validation of model {name} did not pass'
            logger.error(errmsg)
    except Exception as err:
        # Bug fix: assign the failure message BEFORE logging it. The original
        # logged the stale (empty) errmsg and only then set the real message.
        errmsg = f'Import of model {name} failed. Validation failed with error: {err}'
        logger.error(errmsg)

    if not success:
        # rollback in case of validation error or failure
        logger.debug(f'Rolling back import of model {name} to {desired_model_path}')
        try:
            import_plan.rollback()
        except TrestleError as err:
            logger.error(f'Failed to rollback: {err}. Remove {desired_model_path} to resolve state.')
        else:
            logger.debug(f'Successful rollback of import to {desired_model_path}')

        # raise trestle error
        raise TrestleError(errmsg)

    # all well; model was imported and validated successfully
    logger.debug(f'Model {name} of type {model.__class__.__name__} imported successfully.')
    return ManagedOSCAL(self._root_dir, model.__class__, name)
def assemble_model(cls, model_alias: str, object_type: Type[TLO], args: argparse.Namespace) -> int:
    """Assemble a top level OSCAL model within the trestle dist directory.

    Returns:
        0 on success, 1 on any failure (failures are logged, not raised).
    """
    log.set_log_level_from_args(args)
    trestle_root = fs.get_trestle_project_root(Path.cwd())
    if not trestle_root:
        logger.error(f'Current working directory {Path.cwd()} is not with a trestle project.')
        return 1
    if not trestle_root == Path.cwd():
        logger.error(f'Current working directory {Path.cwd()} is not the top level trestle project directory.')
        return 1

    # contruct path to the model file name
    root_model_dir = Path.cwd() / f'{model_alias}s'
    try:
        # Infer json/yaml from files already present in the model directory.
        model_file_type = fs.get_contextual_file_type(root_model_dir / args.name)
    except Exception as e:
        logger.error('No files found in the specified model directory.')
        logger.debug(e)
        return 1

    model_file_name = f'{model_alias}{FileContentType.to_file_extension(model_file_type)}'
    root_model_filepath = root_model_dir / args.name / model_file_name

    if not root_model_filepath.exists():
        logger.error(f'No top level model file at {root_model_dir}')
        return 1

    # distributed load: recursively merges any split/decomposed parts
    _, _, assembled_model = load_distributed(root_model_filepath)
    # Append 's' unless the alias already ends in 's' (e.g. poam-style aliases).
    plural_alias = model_alias if model_alias[-1] == 's' else model_alias + 's'
    assembled_model_dir = trestle_root / const.TRESTLE_DIST_DIR / plural_alias
    assembled_model_filepath = assembled_model_dir / f'{args.name}.{args.extension}'

    # Plan: create (or clear) the dist path, then write the assembled model.
    plan = Plan()
    plan.add_action(CreatePathAction(assembled_model_filepath, True))
    plan.add_action(
        WriteFileAction(
            assembled_model_filepath, Element(assembled_model), FileContentType.to_content_type(f'.{args.extension}')
        )
    )

    try:
        # Dry-run before executing so failures surface before files change.
        plan.simulate()
        plan.execute()
        return 0
    except Exception as e:
        logger.error('Unknown error executing trestle create operations. Rolling back.')
        logger.debug(e)
        return 1
def create_object(cls, model_alias: str, object_type: Type[TLO], args: argparse.Namespace) -> int:
    """Create a top level OSCAL object within the trestle directory, leveraging functionality in add.

    Returns:
        0 on success, 1 on any failure (failures are logged, not raised).
    """
    log.set_log_level_from_args(args)
    trestle_root = fs.get_trestle_project_root(Path.cwd())
    if not trestle_root:
        logger.error(f'Current working directory {Path.cwd()} is not with a trestle project.')
        return 1
    plural_path: str
    # Cater to POAM
    if model_alias[-1] == 's':
        plural_path = model_alias
    else:
        plural_path = model_alias + 's'

    desired_model_dir = trestle_root / plural_path / args.name
    desired_model_path = desired_model_dir / (model_alias + '.' + args.extension)

    if desired_model_path.exists():
        logger.error(f'OSCAL file to be created here: {desired_model_path} exists.')
        logger.error('Aborting trestle create.')
        return 1

    # Create sample model.
    sample_model = generators.generate_sample_model(object_type)
    # Presuming top level level model not sure how to do the typing for this.
    sample_model.metadata.title = f'Generic {model_alias} created by trestle.'  # type: ignore
    sample_model.metadata.last_modified = datetime.now().astimezone()
    sample_model.metadata.oscal_version = trestle.oscal.OSCAL_VERSION
    sample_model.metadata.version = '0.0.0'

    top_element = Element(sample_model, model_alias)

    create_action = CreatePathAction(desired_model_path.absolute(), True)
    write_action = WriteFileAction(
        desired_model_path.absolute(), top_element, FileContentType.to_content_type(desired_model_path.suffix)
    )

    # create a plan to write the directory and file.
    try:
        create_plan = Plan()
        create_plan.add_action(create_action)
        create_plan.add_action(write_action)
        # Dry-run before executing so failures surface before files change.
        create_plan.simulate()
        create_plan.execute()
        return 0
    except Exception as e:
        logger.error('Unknown error executing trestle create operations. Rolling back.')
        logger.debug(e)
        return 1
def to_root_path(self, content_type: FileContentType = None) -> pathlib.Path:
    """Convert to a file path for the element root."""
    root_str = f'./{self.get_first()}'
    # With a content type this is a file path, so append the extension;
    # otherwise it stays a bare (directory-style) path.
    if content_type is not None:
        root_str = root_str + FileContentType.to_file_extension(content_type)
    return pathlib.Path(root_str)
def full_path_for_top_level_model(
        trestle_root: pathlib.Path,
        model_name: str,
        model_class: Type[TopLevelOscalModel],
) -> pathlib.Path:
    """
    Find the full path of an existing model given its name and model type but no file content type.

    Use this method when you need the path of a model but you don't know the file content type.
    This method should only be called if the model needs to exist already in the trestle directory.
    If you do know the file content type, use path_for_top_level_model instead.
    """
    base_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model_class)
    # Inspect the file system to discover whether the model is stored as json or yaml.
    found_type = FileContentType.path_to_content_type(base_path)
    if not FileContentType.is_readable_file(found_type):
        raise TrestleError(f'Unable to load model {model_name} as json or yaml.')
    return base_path.with_suffix(FileContentType.to_file_extension(found_type))
def add(cls, file_path, element_path, parent_model, parent_element):
    """For a file_path and element_path, add a child model to the parent_element of a given parent_model.

    First we find the child model at the specified element path and instantiate it with default values.
    Then we check if there's already existing element at that path, in which case we append the child model
    to the existing list of dict.
    Then we set up an action plan to update the model (specified by file_path) in memory, create a file
    at the same location and write the file.

    Args:
        file_path: Path of the model file to update on disk.
        element_path: ElementPath at which the child model is added.
        parent_model: Model class of the parent.
        parent_element: Element wrapping the loaded parent model instance.

    Raises:
        err.TrestleError: If the element path contains a wildcard, is otherwise
            invalid, or an existing element is neither a list nor a dictionary.
    """
    element_path_list = element_path.get_full_path_parts()
    if '*' in element_path_list:
        raise err.TrestleError('trestle add does not support Wildcard element path.')
    # Get child model
    try:
        child_model = utils.get_target_model(element_path_list, parent_model)
        # Create child element with sample values
        child_object = utils.get_sample_model(child_model)

        # Hoisted: the original re-evaluated parent_element.get_at(element_path)
        # up to four times for the same path.
        existing_object = parent_element.get_at(element_path)
        if existing_object is not None:
            # The element already exists; merge the sample into it.
            if isinstance(existing_object, list):
                child_object = existing_object + child_object
            elif isinstance(existing_object, dict):
                child_object = {**existing_object, **child_object}
            else:
                raise err.TrestleError('Already exists and is not a list or dictionary.')

    except Exception as e:
        raise err.TrestleError(f'Bad element path. {str(e)}')

    update_action = UpdateAction(sub_element=child_object, dest_element=parent_element, sub_element_path=element_path)
    create_action = CreatePathAction(file_path.absolute(), True)
    write_action = WriteFileAction(
        file_path.absolute(), parent_element, FileContentType.to_content_type(file_path.suffix)
    )

    add_plan = Plan()
    add_plan.add_action(update_action)
    add_plan.add_action(create_action)
    add_plan.add_action(write_action)
    # Dry-run before executing so failures surface before files change.
    add_plan.simulate()
    add_plan.execute()
def oscal_read(cls, path: pathlib.Path) -> Optional['OscalBaseModel']:
    """
    Read OSCAL objects.

    Handles the fact OSCAL wraps top level elements and also deals with both yaml and json.

    Args:
        path: The path of the oscal object to read.
    Returns:
        The oscal object read into trestle oscal models, or None if the path does not exist.
    Raises:
        err.TrestleError: If the file cannot be loaded, has more than one top
            level key, lacks the expected wrapper key, or fails to parse.
    """
    # Create the wrapper model.
    alias = classname_to_alias(cls.__name__, AliasMode.JSON)

    content_type = FileContentType.to_content_type(path.suffix)
    logger.debug(f'oscal_read content type {content_type} and alias {alias} from {path}')
    if not path.exists():
        logger.warning(f'path does not exist in oscal_read: {path}')
        return None

    obj: Dict[str, Any] = {}
    try:
        if content_type == FileContentType.YAML:
            yaml = YAML(typ='safe')
            # with-statement fixes the original's leaked file handle when
            # yaml.load raised before fh.close() was reached.
            with path.open('r', encoding=const.FILE_ENCODING) as fh:
                obj = yaml.load(fh)
        elif content_type == FileContentType.JSON:
            obj = load_file(
                path,
                json_loads=cls.__config__.json_loads,
            )
    except Exception as e:
        raise err.TrestleError(f'Error loading file {path} {str(e)}')
    try:
        # OSCAL files must wrap the model in exactly one top level key.
        if not len(obj) == 1:
            raise err.TrestleError(
                f'Invalid OSCAL file structure, oscal file '
                f'does not have a single top level key wrapping it. It has {len(obj)} keys.'
            )
        parsed = cls.parse_obj(obj[alias])
    except KeyError:
        raise err.TrestleError(f'Provided oscal file does not have top level key key: {alias}')
    except Exception as e:
        raise err.TrestleError(f'Error parsing file {path} {str(e)}')

    return parsed
def load_file(file_name: pathlib.Path) -> Dict[str, Any]:
    """
    Load JSON or YAML file content into a dict.

    This is not intended to be the default load mechanism. It should only be used
    if a OSCAL object type is unknown but the context a user is in.
    """
    detected_type = FileContentType.to_content_type(file_name.suffix)
    with file_name.open('r', encoding=const.FILE_ENCODING) as handle:
        if detected_type == FileContentType.JSON:
            return json.load(handle)
        if detected_type == FileContentType.YAML:
            return yaml.load(handle, yaml.FullLoader)
def path_for_top_level_model(
        trestle_root: pathlib.Path,
        model_name: str,
        model_class: Type[TopLevelOscalModel],
        file_content_type: FileContentType
) -> pathlib.Path:
    """
    Find the full path of a model given its name, model type and file content type.

    This does not inspect the file system or confirm the needed path and file exists.
    """
    # Pure path construction: root path for the model plus the extension of the
    # requested content type.
    suffix = FileContentType.to_file_extension(file_content_type)
    base_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model_class)
    return base_path.with_suffix(suffix)
def test_merge_everything_into_catalog(testdata_dir, tmp_trestle_dir): """Test '$mycatalog$ trestle merge -e catalog.*' when metadata and catalog is already split.""" # Assume we are running a command like below # trestle merge -e catalog.* content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) # Change directory to mycatalog_dir os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() assert catalog_file.exists() # Read files # Create hand-crafter merge plan expected_plan: Plan = Plan() reset_destination_action = CreatePathAction(catalog_file, clear_content=True) expected_plan.add_action(reset_destination_action) _, _, merged_catalog_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir) element = Element(merged_catalog_instance) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) expected_plan.add_action(write_destination_action) delete_element_action = RemovePathAction(Path('catalog').resolve()) expected_plan.add_action(delete_element_action) # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.*'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' assert generated_plan == expected_plan
def get_contextual_file_type(path: pathlib.Path) -> FileContentType:
    """Return the file content type for files in the given directory, if it's a trestle project."""
    if not is_valid_project_model_path(path):
        raise err.TrestleError('Trestle project not found.')

    # First pass: any regular file directly in this directory decides the type.
    for entry in path.iterdir():
        if entry.is_file():
            return FileContentType.to_content_type(entry.suffix)

    # Second pass: no files here, so recurse into the first subdirectory found.
    for entry in path.iterdir():
        if entry.is_dir():
            return get_contextual_file_type(entry)

    raise err.TrestleError('No files found in the project.')
def prepare_trestle_project_dir(
        repo_dir: pathlib.Path,
        content_type: FileContentType,
        model_obj: OscalBaseModel,
        models_dir_name: str
):
    """Prepare a temp directory with an example OSCAL model."""
    ensure_trestle_config_dir(repo_dir)

    # Derive the model file name from the model's class alias and content type.
    alias = str_utils.classname_to_alias(model_obj.__class__.__name__, AliasMode.JSON)
    extension = FileContentType.to_file_extension(content_type)

    model_dir = repo_dir / models_dir_name / 'my_test_model'
    model_file = model_dir / f'{alias}{extension}'
    model_dir.mkdir(exist_ok=True, parents=True)
    model_obj.oscal_write(model_file)

    return model_dir, model_file
def save_top_level_model(
        model: TopLevelOscalModel,
        trestle_root: pathlib.Path,
        model_name: str,
        file_content_type: FileContentType
) -> None:
    """Save a model by name and infer model type by inspection.

    You don't need to specify the model type (catalog, profile, etc.) but you must specify the file content type.
    If the model directory does not exist, it is created.
    """
    base_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model)
    target = base_path.with_suffix(FileContentType.to_file_extension(file_content_type))
    # Create the model directory on first save; exist_ok makes this a no-op otherwise.
    target.parent.mkdir(parents=True, exist_ok=True)
    model.oscal_write(target)
def prepare_trestle_project_dir(
        tmp_dir,
        content_type: FileContentType,
        model_obj: OscalBaseModel,
        models_dir_name: str
):
    """Prepare a temp directory with an example OSCAL model."""
    ensure_trestle_config_dir(tmp_dir)

    # Derive the model file name from the model's class alias and content type.
    alias = utils.classname_to_alias(model_obj.__class__.__name__, 'json')
    extension = FileContentType.to_file_extension(content_type)

    model_dir = tmp_dir / models_dir_name / 'my_test_model'
    model_file = model_dir / f'{alias}{extension}'
    fs.ensure_directory(model_dir)
    model_obj.oscal_write(model_file)

    return model_dir, model_file
def find_last_file_in_path(self, content_type: FileContentType, model_dir: pathlib.Path) -> pathlib.Path:
    """Find the last (nearest) existing file in the element path leading to this element."""
    # model dir is the top level dir for this model, e.g. catalogs/mycat
    current_dir = model_dir
    ext = FileContentType.to_file_extension(content_type)
    deepest_file: pathlib.Path = None
    for part in self._path:
        # A wildcard ends the walk; so does the first missing file.
        if part == '*':
            break
        candidate = (current_dir / part).with_suffix(ext)
        if not candidate.exists():
            break
        current_dir = current_dir / part
        deepest_file = candidate
    # None when not even the first element's file exists.
    return deepest_file
def _run(self, args: argparse.Namespace) -> int:
    """Add an OSCAL component/subcomponent to the specified component.

    This method takes input a filename and a list of comma-seperated element path. Element paths are field aliases.
    The method first finds the parent model from the file and loads the file into the model.
    Then the method executes 'add' for each of the element paths specified.

    Returns:
        0 on success, 1 on any failure (failures are logged, not raised).
    """
    log.set_log_level_from_args(args)
    try:
        args_dict = args.__dict__

        file_path = pathlib.Path(args_dict[const.ARG_FILE])

        # Get parent model and then load json into parent model
        parent_model, parent_alias = fs.get_stripped_contextual_model(file_path.absolute())
        parent_object = parent_model.oscal_read(file_path.absolute())
        # FIXME : handle YAML files after detecting file type
        parent_element = Element(parent_object, utils.classname_to_alias(parent_model.__name__, 'json'))

        add_plan = Plan()

        # Do _add for each element_path specified in args
        element_paths: List[str] = args_dict[const.ARG_ELEMENT].split(',')
        for elm_path_str in element_paths:
            element_path = ElementPath(elm_path_str)
            # add returns the updated parent element, which is threaded into the next iteration.
            update_action, parent_element = self.add(element_path, parent_model, parent_element)
            add_plan.add_action(update_action)

        create_action = CreatePathAction(file_path.absolute(), True)
        write_action = WriteFileAction(
            file_path.absolute(), parent_element, FileContentType.to_content_type(file_path.suffix)
        )

        add_plan.add_action(create_action)
        add_plan.add_action(write_action)

        # Dry-run the plan before executing it.
        add_plan.simulate()
        add_plan.execute()
    except BaseException as err:
        logger.error(f'Add failed: {err}')
        return 1
    return 0
def _run(self, args: argparse.Namespace) -> int:
    """Remove an OSCAL component/subcomponent to the specified component.

    This method takes input a filename and a list of comma-seperated element path. Element paths are field aliases.
    The method first finds the parent model from the file and loads the file into the model.
    Then the method executes 'remove' for each of the element paths specified.

    Returns:
        CmdReturnCodes.SUCCESS.value on success; otherwise the code produced by
        err.handle_generic_command_exception.
    """
    try:
        log.set_log_level_from_args(args)

        args_dict = args.__dict__

        file_path = pathlib.Path(args_dict[const.ARG_FILE]).resolve()
        relative_path = file_path.relative_to(args.trestle_root)
        # Get parent model and then load json into parent model
        parent_model, parent_alias = ModelUtils.get_relative_model_type(relative_path)

        parent_object = parent_model.oscal_read(file_path)

        parent_element = Element(parent_object, parent_alias)

        add_plan = Plan()

        # Do _remove for each element_path specified in args
        element_paths: List[str] = str(args_dict[const.ARG_ELEMENT]).split(',')

        for elm_path_str in element_paths:
            element_path = ElementPath(elm_path_str)
            remove_action, parent_element = self.remove(element_path, parent_element)
            add_plan.add_action(remove_action)

        create_action = CreatePathAction(file_path, True)
        write_action = WriteFileAction(file_path, parent_element, FileContentType.to_content_type(file_path.suffix))

        # Bug fix: the original added the last remove_action to the plan a second
        # time here (it was already added inside the loop), so it executed twice.
        add_plan.add_action(create_action)
        add_plan.add_action(write_action)

        add_plan.execute()

        return CmdReturnCodes.SUCCESS.value
    except Exception as e:
        return err.handle_generic_command_exception(e, logger, 'Error while removing OSCAL component')
def _run(self, args: argparse.Namespace) -> int:
    """Split an OSCAL file into elements.

    Returns:
        0 on success, 1 when the file argument is missing or the file does not exist.
    """
    logger.debug('Entering trestle split.')
    log.set_log_level_from_args(args)
    # get the Model
    args_raw = args.__dict__
    if args_raw[const.ARG_FILE] is None:
        logger.error(f'Argument "-{const.ARG_FILE_SHORT}" is required')
        return 1

    file_path = pathlib.Path(args_raw[const.ARG_FILE])
    if not file_path.exists():
        logger.error(f'File {file_path} does not exist.')
        return 1
    content_type = FileContentType.to_content_type(file_path.suffix)

    # find the base directory of the file
    file_absolute_path = pathlib.Path(file_path.absolute())
    base_dir = file_absolute_path.parent

    model_type, _ = fs.get_stripped_contextual_model(file_absolute_path)

    # FIXME: Handle list/dicts
    model: OscalBaseModel = model_type.oscal_read(file_path)

    element_paths: List[ElementPath] = cmd_utils.parse_element_args(args_raw[const.ARG_ELEMENT].split(','))

    split_plan = self.split_model(
        model, element_paths, base_dir, content_type, root_file_name=args_raw[const.ARG_FILE]
    )

    # Simulate the plan
    # if it fails, it would throw errors and get out of this command
    split_plan.simulate()

    # If we are here then simulation passed
    # so move the original file to the trash
    trash.store(file_path, True)

    # execute the plan
    split_plan.execute()

    return 0
def create_object(cls, model_alias: str, object_type: Type[TopLevelOscalModel], args: argparse.Namespace) -> int:
    """Create a top level OSCAL object within the trestle directory, leveraging functionality in add."""
    log.set_log_level_from_args(args)

    trestle_root = args.trestle_root  # trestle root is set via command line in args. Default is cwd.
    if not trestle_root or not file_utils.is_valid_project_root(args.trestle_root):
        raise err.TrestleRootError(f'Given directory {trestle_root} is not a trestle project.')

    # Work out where the new model file will live.
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    desired_model_dir = trestle_root / plural_path / args.output
    desired_model_path = desired_model_dir / (model_alias + '.' + args.extension)

    if desired_model_path.exists():
        raise err.TrestleError(f'OSCAL file to be created here: {desired_model_path} exists.')

    # Generate a sample model and stamp its metadata.
    sample_model = generators.generate_sample_model(object_type, include_optional=args.include_optional_fields)
    # Presuming top level level model not sure how to do the typing for this.
    sample_model.metadata.title = f'Generic {model_alias} created by trestle named {args.output}.'  # type: ignore
    sample_model.metadata.last_modified = datetime.now().astimezone()
    sample_model.metadata.oscal_version = trestle.oscal.OSCAL_VERSION
    sample_model.metadata.version = '0.0.0'

    top_element = Element(sample_model, model_alias)

    # Plan: create the path, then write the sample model file.
    create_plan = Plan()
    create_plan.add_action(CreatePathAction(desired_model_path.resolve(), True))
    create_plan.add_action(
        WriteFileAction(
            desired_model_path.resolve(), top_element, FileContentType.to_content_type(desired_model_path.suffix)
        )
    )
    create_plan.execute()
    return CmdReturnCodes.SUCCESS.value
def oscal_write(self, path: pathlib.Path) -> None:
    """
    Write out a pydantic data model in an oscal friendly way.

    OSCAL schema mandates that top level elements are wrapped in a singular
    json/yaml field. This function handles both json and yaml output as well as
    formatting of the json.

    Args:
        path: The output file location for the oscal object.

    Raises:
        err.TrestleError: If a unknown file extension is provided.
    """
    class_name = self.__class__.__name__
    # It would be nice to pass through the description but I can't seem to and
    # it does not affect the output
    dynamic_parser = {}
    dynamic_parser[classname_to_alias(class_name, 'field')] = (
        self.__class__,
        Field(self, title=classname_to_alias(class_name, 'field'), alias=classname_to_alias(class_name, 'json'))
    )
    wrapper_model = create_model(class_name, __base__=OscalBaseModel, **dynamic_parser)  # type: ignore
    # Default behaviour is strange here.
    wrapped_model = wrapper_model(**{classname_to_alias(class_name, 'json'): self})

    # Bug fix: this assignment was commented out while content_type was still
    # used below, which raised NameError on every call.
    content_type = FileContentType.to_content_type(path.suffix)
    # Context manager closes the handle even if serialization raises.
    with pathlib.Path(path).open('w', encoding=const.FILE_ENCODING) as write_file:
        if content_type == FileContentType.YAML:
            yaml.dump(yaml.safe_load(wrapped_model.json(exclude_none=True, by_alias=True)), write_file)
        elif content_type == FileContentType.JSON:
            write_file.write(wrapped_model.json(exclude_none=True, by_alias=True, indent=2))
def oscal_read(cls, path: pathlib.Path) -> 'OscalBaseModel':
    """
    Read OSCAL objects.

    Handles the fact OSCAL wrap's top level elements and also deals with both yaml and json.

    Args:
        path: The path of the oscal object to read.

    Raises:
        err.TrestleError: If the file extension is neither yaml nor json.
    """
    # Create the wrapper model.
    alias = classname_to_alias(cls.__name__, 'json')

    content_type = FileContentType.to_content_type(path.suffix)
    if content_type == FileContentType.YAML:
        # with-statement fixes the original's leaked handle from path.open().
        with path.open() as fh:
            return cls.parse_obj(yaml.safe_load(fh)[alias])
    elif content_type == FileContentType.JSON:
        obj = load_file(
            path,
            json_loads=cls.__config__.json_loads,
        )
        return cls.parse_obj(obj[alias])
    else:
        raise err.TrestleError('Unknown file type')
def load_distributed(
    abs_path: Path,
    abs_trestle_root: Path,
    collection_type: Optional[Type[Any]] = None
) -> Tuple[Type[OscalBaseModel], str, Union[OscalBaseModel, List[OscalBaseModel], Dict[str, OscalBaseModel]]]:
    """
    Given path to a model, load the model.

    If the model is decomposed/split/distributed, the decomposed models are loaded recursively.

    Args:
        abs_path: The path to the file/directory to be loaded.
        abs_trestle_root: The trestle project root directory.
        collection_type: The type of collection model, if it is a collection model.
            typing.List is the only collection type handled or expected. Defaults to None.

    Returns:
        Return a tuple of Model Type (e.g. class 'trestle.oscal.catalog.Catalog'), Model Alias
        (e.g. 'catalog.metadata') and Instance of the Model. If the model is
        decomposed/split/distributed, the instance of the model contains the decomposed models
        loaded recursively.

    Raises:
        TrestleNotFoundError: If neither the file nor its directory form exists.
        TrestleError: If collection_type is given but is not list.
    """
    # if trying to load file that does not exist, load path instead
    # (a split model lives in a directory named after the file's stem)
    if not abs_path.exists():
        abs_path = abs_path.with_name(abs_path.stem)
    if not abs_path.exists():
        raise TrestleNotFoundError(f'File {abs_path} not found for load.')

    if collection_type:
        # If the path contains a list type model
        if collection_type is list:
            return ModelUtils._load_list(abs_path, abs_trestle_root)
        # the only other collection type in OSCAL is dict, and it only applies to include_all,
        # which is too granular ever to be loaded by this routine
        else:
            raise TrestleError(f'Collection type {collection_type} not recognized for distributed load.')

    # Get current model
    primary_model_type, primary_model_alias = ModelUtils.get_stripped_model_type(abs_path, abs_trestle_root)
    primary_model_instance: Optional[OscalBaseModel] = None

    # is this an attempt to load an actual json or yaml file?
    content_type = FileContentType.path_to_content_type(abs_path)
    # if file is sought but it doesn't exist, ignore and load as decomposed model
    if FileContentType.is_readable_file(content_type) and abs_path.exists():
        primary_model_instance = primary_model_type.oscal_read(abs_path)

    # Is model decomposed? A sibling directory named after the file stem holds the split parts.
    decomposed_dir = abs_path.with_name(abs_path.stem)
    if decomposed_dir.exists():
        # Aliases of fields that were split out; these must NOT be stripped when
        # re-deriving the merged model type below.
        aliases_not_to_be_stripped = []
        instances_to_be_merged: List[OscalBaseModel] = []

        # sorted() gives deterministic merge order over the decomposed parts
        for local_path in sorted(trestle.common.file_utils.iterdir_without_hidden_files(decomposed_dir)):
            if local_path.is_file():
                # Recurse to load each split-out field file (itself possibly decomposed).
                model_type, model_alias, model_instance = ModelUtils.load_distributed(local_path, abs_trestle_root)
                aliases_not_to_be_stripped.append(model_alias.split('.')[-1])
                instances_to_be_merged.append(model_instance)
            elif local_path.is_dir():
                model_type, model_alias = ModelUtils.get_stripped_model_type(local_path, abs_trestle_root)
                # Only load the directory if it is a collection model. Otherwise do nothing - it gets loaded when
                # iterating over the model file

                # If a model is just a container for a list e.g.
                # class Foo(OscalBaseModel):  noqa: E800
                #    __root__: List[Bar]  noqa: E800
                # You need to test whether first a root key exists
                # then whether the outer_type of root is a collection.
                # Alternative is to do a try except to avoid the error for an unknown key.
                if model_type.is_collection_container():
                    # This directory is a decomposed List or Dict
                    collection_type = model_type.get_collection_type()
                    model_type, model_alias, model_instance = ModelUtils.load_distributed(
                        local_path, abs_trestle_root, collection_type)
                    aliases_not_to_be_stripped.append(model_alias.split('.')[-1])
                    instances_to_be_merged.append(model_instance)

        # Start the merged dict from the primary (stripped) model, if a file was read.
        primary_model_dict = {}
        if primary_model_instance is not None:
            primary_model_dict = primary_model_instance.__dict__

        merged_model_type, merged_model_alias = ModelUtils.get_stripped_model_type(
            abs_path, abs_trestle_root, aliases_not_to_be_stripped)

        # The following use of top_level is to allow loading of a top level model by name only, e.g. MyCatalog
        # There may be a better overall way to approach this.
        top_level = len(merged_model_alias.split('.')) == 1

        for i in range(len(aliases_not_to_be_stripped)):
            alias = aliases_not_to_be_stripped[i]
            instance = instances_to_be_merged[i]
            # Unwrap pydantic __root__ container models to their inner value before merging.
            if hasattr(instance, '__dict__') and '__root__' in instance.__dict__ and isinstance(instance, OscalBaseModel):
                instance = instance.__dict__['__root__']
            if top_level and not primary_model_dict:
                # No primary file content: adopt the first instance's fields wholesale.
                primary_model_dict = instance.__dict__
            else:
                primary_model_dict[alias] = instance

        merged_model_instance = merged_model_type(**primary_model_dict)  # type: ignore
        return merged_model_type, merged_model_alias, merged_model_instance
    return primary_model_type, primary_model_alias, primary_model_instance
def test_split_multi_level_dict(tmp_path: pathlib.Path, sample_target_def: ostarget.TargetDefinition) -> None:
    """Test for split_model method."""
    # Assume we are running a command like below
    # trestle split -f target.yaml -e target-definition.targets.*.target-control-implementations.*
    content_type = FileContentType.YAML

    # prepare trestle project dir with the file
    target_def_dir, target_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path,
        content_type,
        sample_target_def,
        test_utils.TARGET_DEFS_DIR)

    file_ext = FileContentType.to_file_extension(content_type)

    # read the model from file
    target_def: ostarget.TargetDefinition = ostarget.TargetDefinition.oscal_read(target_def_file)
    element = Element(target_def)
    element_args = ['target-definition.targets.*.target-control-implementations.*']
    element_paths = test_utils.prepare_element_paths(target_def_dir, element_args)

    # Build the expected plan by hand; the action ORDER matters because the
    # equality assert at the end compares against SplitCmd's generated plan.
    expected_plan = Plan()

    # extract values
    targets: dict = element.get_at(element_paths[0])
    targets_dir = target_def_dir / element_paths[0].to_file_path()

    # split every targets
    for key in targets:
        # individual target dir
        target: ostarget.Target = targets[key]
        target_element = Element(targets[key])
        model_type = utils.classname_to_alias(type(target).__name__, 'json')
        dir_prefix = key
        target_dir_name = f'{dir_prefix}{const.IDX_SEP}{model_type}'
        target_file = targets_dir / f'{target_dir_name}{file_ext}'

        # target control impl dir for the target
        target_ctrl_impls: dict = target_element.get_at(element_paths[1])
        targets_ctrl_dir = targets_dir / element_paths[1].to_file_path(root_dir=target_dir_name)
        for i, target_ctrl_impl in enumerate(target_ctrl_impls):
            model_type = utils.classname_to_alias(type(target_ctrl_impl).__name__, 'json')
            # file names are zero-padded by index, e.g. 00000__<alias>.yaml
            file_prefix = str(i).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
            file_name = f'{file_prefix}{const.IDX_SEP}{model_type}{file_ext}'
            file_path = targets_ctrl_dir / file_name
            expected_plan.add_action(CreatePathAction(file_path))
            expected_plan.add_action(WriteFileAction(file_path, Element(target_ctrl_impl), content_type))

        # write stripped target model (split-out field removed from the parent)
        stripped_target = target.stripped_instance(stripped_fields_aliases=[element_paths[1].get_element_name()])
        expected_plan.add_action(CreatePathAction(target_file))
        expected_plan.add_action(WriteFileAction(target_file, Element(stripped_target), content_type))

    # finally rewrite the stripped root model in place (True => clear existing content)
    root_file = target_def_dir / f'target-definition{file_ext}'
    remaining_root = element.get().stripped_instance(stripped_fields_aliases=[element_paths[0].get_element_name()])
    expected_plan.add_action(CreatePathAction(root_file, True))
    expected_plan.add_action(WriteFileAction(root_file, Element(remaining_root), content_type))

    split_plan = SplitCmd.split_model(target_def, element_paths, target_def_dir, content_type)
    assert expected_plan == split_plan
def to_model_file_name(model_obj: OscalBaseModel, file_prefix: str, content_type: FileContentType) -> str:
    """Return the file name for the item, as <prefix><separator><model alias><extension>."""
    alias = utils.classname_to_alias(type(model_obj).__name__, 'json')
    extension = FileContentType.to_file_extension(content_type)
    return f'{file_prefix}{const.IDX_SEP}{alias}{extension}'
def test_merge_plan_simple_case(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.back-matter'."""
    # Assume we are running a command like below
    # trestle merge -e catalog.back-matter
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4 (replace any existing catalogs dir)
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    # NOTE: the rest of the test runs with cwd set to the model dir
    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}').resolve()
    catalog_dir = Path('catalog/')
    back_matter_file = (catalog_dir / f'back-matter{fext}').resolve()

    assert catalog_file.exists()
    assert back_matter_file.exists()

    # Read files

    # The destination file/model needs to be loaded in a stripped model
    stripped_catalog_type, _ = ModelUtils.get_stripped_model_type(catalog_file.resolve(), tmp_trestle_dir)
    stripped_catalog = stripped_catalog_type.oscal_read(catalog_file)

    # Back-matter model needs to be complete and if it is decomposed, needs to be merged recursively first
    back_matter = common.BackMatter.oscal_read(back_matter_file)

    # Back-matter needs to be inserted in a stripped Catalog that does NOT exclude the back-matter fields
    merged_catalog_type, merged_catalog_alias = ModelUtils.get_stripped_model_type(
        catalog_file.resolve(), tmp_trestle_dir, aliases_not_to_be_stripped=['back-matter'])

    merged_dict = stripped_catalog.__dict__
    merged_dict['back-matter'] = back_matter
    merged_catalog = merged_catalog_type(**merged_dict)

    element = Element(merged_catalog, merged_catalog_alias)

    # Create hand-crafted merge plan: rewrite the merged catalog in place,
    # then remove the now-redundant split-out back-matter file.
    reset_destination_action = CreatePathAction(catalog_file, clear_content=True)
    write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type)
    delete_element_action = RemovePathAction(back_matter_file)

    expected_plan: Plan = Plan()
    expected_plan.add_action(reset_destination_action)
    expected_plan.add_action(write_destination_action)
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.back-matter'), tmp_trestle_dir)

    # Assert the generated plan matches the expected plan (action order included)
    assert generated_plan == expected_plan