def test_load_list(testdata_dir, tmp_trestle_dir): """Test loading a list recursively.""" # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = tmp_trestle_dir / 'catalogs' mycatalog_dir = catalogs_dir / 'mycatalog' catalog_dir = mycatalog_dir / 'catalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) actual_model_type, actual_model_alias, actual_roles = ModelUtils._load_list( catalog_dir / 'metadata' / 'roles', tmp_trestle_dir) expected_roles = [ Role.oscal_read(catalog_dir / 'metadata/roles/00000__role.json'), Role.oscal_read(catalog_dir / 'metadata/roles/00001__role.json') ] expected_model_type, _ = ModelUtils.get_stripped_model_type( (catalog_dir / 'metadata/roles').resolve(), tmp_trestle_dir) assert actual_model_type.__signature__ == expected_model_type.__signature__ assert actual_model_alias == 'catalog.metadata.roles' assert test_utils.list_unordered_equal(actual_roles, expected_roles)
def test_load_distributed(testdata_dir, tmp_trestle_dir): """Test massive distributed load, that includes recursive load and list.""" # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = tmp_trestle_dir / 'catalogs' mycatalog_dir = catalogs_dir / 'mycatalog' catalog_file = mycatalog_dir / 'catalog.json' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) actual_model_type, actual_model_alias, actual_model_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir) expected_model_instance = Catalog.oscal_read( testdata_dir / 'split_merge/load_distributed/catalog.json') assert actual_model_type == Catalog assert actual_model_alias == 'catalog' assert test_utils.models_are_equivalent(expected_model_instance, actual_model_instance) # confirm it fails attempting to load collection type that is not a list with pytest.raises(TrestleError): actual_model_type, actual_model_alias, actual_model_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir, Dict)
def assemble_model(cls, model_alias: str, args: argparse.Namespace) -> int:
    """Assemble a top level OSCAL model within the trestle dist directory.

    Args:
        model_alias: Alias of the top level model type (e.g. 'catalog').
        args: CLI arguments; args.name optionally selects a single model,
            args.extension selects the output file extension.

    Returns:
        CmdReturnCodes.SUCCESS.value on success.

    Raises:
        TrestleRootError: if the given root is not a trestle project.
        TrestleError: if a selected model has no top level model file.
    """
    log.set_log_level_from_args(args)
    logger.info(f'Assembling models of type {model_alias}.')
    trestle_root = args.trestle_root  # trestle root is set via command line in args. Default is cwd.
    # use trestle_root consistently (original mixed trestle_root and args.trestle_root)
    if not trestle_root or not file_utils.is_valid_project_root(trestle_root):
        raise TrestleRootError(f'Given directory {trestle_root} is not a trestle project.')
    if args.name:
        model_names = [args.name]
        logger.info(f'Assembling single model of type {model_alias}: {args.name}.')
    else:
        model_names = ModelUtils.get_models_of_type(model_alias, trestle_root)
        logger.info(f'Assembling {len(model_names)} found models of type {model_alias}.')
    if not model_names:
        logger.info(f'No models found to assemble of type {model_alias}.')
        return CmdReturnCodes.SUCCESS.value
    # these paths depend only on the model type, so compute them once outside the loop
    plural_alias = ModelUtils.model_type_to_model_dir(model_alias)
    root_model_dir = trestle_root / plural_alias
    assembled_model_dir = trestle_root / const.TRESTLE_DIST_DIR / plural_alias
    for model_name in model_names:
        # construct path to the model file name
        model_file_type = file_utils.get_contextual_file_type(root_model_dir / model_name)
        model_file_name = f'{model_alias}{FileContentType.to_file_extension(model_file_type)}'
        root_model_filepath = root_model_dir / model_name / model_file_name
        if not root_model_filepath.exists():
            raise TrestleError(f'No top level model file at {root_model_dir}')
        # distributed load pulls any split pieces back into one model instance
        _, _, assembled_model = ModelUtils.load_distributed(root_model_filepath, trestle_root)
        assembled_model_filepath = assembled_model_dir / f'{model_name}.{args.extension}'
        plan = Plan()
        plan.add_action(CreatePathAction(assembled_model_filepath, True))
        plan.add_action(
            WriteFileAction(
                assembled_model_filepath,
                Element(assembled_model),
                FileContentType.to_content_type(f'.{args.extension}')))
        plan.execute()
    return CmdReturnCodes.SUCCESS.value
def replicate_object(cls, model_alias: str, args: argparse.Namespace) -> int:
    """
    Core replicate routine invoked by subcommands.

    Args:
        model_alias: Name of the top level model in the trestle directory.
        args: CLI arguments

    Returns:
        A return code that can be used as standard posix codes. 0 is success.
    """
    logger.debug('Entering replicate_object.')
    # 1 Bad working directory if not running from current working directory
    trestle_root = args.trestle_root  # trestle root is set via command line in args. Default is cwd.
    if not trestle_root or not file_utils.is_valid_project_root(trestle_root):
        raise TrestleError(f'Given directory: {trestle_root} is not a trestle project.')
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    # 2 Check that input file given exists.
    input_file_stem = trestle_root / plural_path / args.name / model_alias
    content_type = FileContentType.path_to_content_type(input_file_stem)
    if content_type == FileContentType.UNKNOWN:
        raise TrestleError(
            f'Input file {args.name} has no json or yaml file at expected location {input_file_stem}.'
        )
    input_file = input_file_stem.with_suffix(FileContentType.to_file_extension(content_type))
    # 3 Distributed load from file
    # load_distributed reassembles the model even if it was previously split;
    # it also returns the definitive alias for the loaded model
    _, model_alias, model_instance = ModelUtils.load_distributed(input_file, trestle_root)
    rep_model_path = trestle_root / plural_path / args.output / (
        model_alias + FileContentType.to_file_extension(content_type)
    )
    # refuse to overwrite an existing replica
    if rep_model_path.exists():
        raise TrestleError(f'OSCAL file to be replicated here: {rep_model_path} exists.')
    if args.regenerate:
        # give the replica fresh uuids and fix up internal references to them
        logger.debug(f'regenerating uuids for model {input_file}')
        model_instance, uuid_lut, n_refs_updated = ModelUtils.regenerate_uuids(model_instance)
        logger.debug(f'{len(uuid_lut)} uuids generated and {n_refs_updated} references updated')
    # 4 Prepare actions and plan
    top_element = Element(model_instance)
    create_action = CreatePathAction(rep_model_path, True)
    write_action = WriteFileAction(rep_model_path, top_element, content_type)
    # create a plan to create the directory and imported file.
    replicate_plan = Plan()
    replicate_plan.add_action(create_action)
    replicate_plan.add_action(write_action)
    replicate_plan.execute()
    return CmdReturnCodes.SUCCESS.value
def test_full_path_for_top_level_model(
        tmp_trestle_dir: pathlib.Path, sample_catalog_minimal: catalog.Catalog) -> None:
    """Confirm full_path_for_top_level_model locates a saved catalog."""
    # save a minimal catalog, then ask for its path by name and type
    ModelUtils.save_top_level_model(sample_catalog_minimal, tmp_trestle_dir, 'mycat', FileContentType.JSON)
    resolved_path = ModelUtils.full_path_for_top_level_model(tmp_trestle_dir, 'mycat', catalog.Catalog)
    assert resolved_path == tmp_trestle_dir / 'catalogs/mycat/catalog.json'
def test_model_type_to_model_dir() -> None:
    """Test model type to model dir."""
    assert ModelUtils.model_type_to_model_dir('catalog') == 'catalogs'
    # The original try/except/else ended with `assert 'test failed'`, which is a
    # non-empty (truthy) string and therefore could never fail, so a missing
    # exception went undetected.  pytest.raises asserts the exception properly.
    with pytest.raises(Exception):
        ModelUtils.model_type_to_model_dir('foo')
def test_update_last_modified(sample_catalog_rich_controls: catalog.Catalog) -> None:
    """Verify last-modified is settable explicitly and defaults to now."""
    # stamp the model with a timestamp one hour in the past
    one_hour_back = datetime.now().astimezone() - timedelta(seconds=const.HOUR_SECONDS)
    ModelUtils.update_last_modified(sample_catalog_rich_controls, one_hour_back)
    assert sample_catalog_rich_controls.metadata.last_modified.__root__ == one_hour_back
    # with no explicit timestamp the model should be stamped "now"
    ModelUtils.update_last_modified(sample_catalog_rich_controls)
    assert ModelUtils.model_age(sample_catalog_rich_controls) < test_utils.NEW_MODEL_AGE_SECONDS
def test_split_merge(testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path) -> None: """Test merging data that has been split using the split command- to ensure symmetry.""" # trestle split -f catalog.json -e catalog.groups.*.controls.* # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step0-merged_catalog/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) catalog_file = Path('catalog.json') # Read and store the catalog before split stripped_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) pre_split_catalog = stripped_catalog_type.oscal_read(catalog_file) assert 'groups' in pre_split_catalog.__fields__.keys() # Split the catalog args = argparse.Namespace(name='split', file='catalog.json', verbose=1, element='catalog.groups.*.controls.*', trestle_root=tmp_trestle_dir) split = SplitCmd()._run(args) assert split == 0 interim_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) interim_catalog = interim_catalog_type.oscal_read(catalog_file.resolve()) assert 'groups' not in interim_catalog.__fields__.keys() # Merge everything back into the catalog # Equivalent to trestle merge -e catalog.* args = argparse.Namespace(name='merge', element='catalog.*', verbose=2, trestle_root=tmp_trestle_dir) rc = MergeCmd()._run(args) assert rc == 0 # Check both the catalogs are the same. post_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) post_merge_catalog = post_catalog_type.oscal_read(catalog_file) assert post_merge_catalog == pre_split_catalog
def test_ssp_bad_control_id(tmp_trestle_dir: pathlib.Path) -> None:
    """Test ssp gen when profile has bad control id."""
    # load a profile known to reference a control id that does not exist
    bad_profile = prof.Profile.oscal_read(test_utils.JSON_TEST_DATA_PATH / 'profile_bad_control.json')
    ModelUtils.save_top_level_model(bad_profile, tmp_trestle_dir, 'bad_prof', FileContentType.JSON)
    cmd_args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        profile='bad_prof',
        output='my_ssp',
        verbose=0,
        sections=None,
        yaml_header=None)
    # generation must fail with a non-zero return code
    assert SSPGenerate()._run(cmd_args) == 1
def test_ssp_generate_generate(tmp_trestle_dir: pathlib.Path) -> None:
    """Test repeat generate with various controls including statement with no parts."""
    cat_name = 'complex_cat'
    prof_name = 'my_prof'
    ssp_name = 'my_ssp'
    catalog = test_utils.generate_complex_catalog()
    ModelUtils.save_top_level_model(catalog, tmp_trestle_dir, cat_name, FileContentType.JSON)
    test_utils.create_profile_in_trestle_dir(tmp_trestle_dir, cat_name, prof_name)
    args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        profile=prof_name,
        output=ssp_name,
        verbose=0,
        sections=None,
        yaml_header=None,
        overwrite_header_values=False,
        allowed_sections=None)
    # generate the markdown with no implementation response text
    ssp_cmd = SSPGenerate()
    assert ssp_cmd._run(args) == 0
    # insert implementation text into the high level statement of a control that has no sub-parts
    control_path = tmp_trestle_dir / ssp_name / 'test-1.md'
    test_utils.insert_text_in_file(control_path, 'control test-1', '\nHello there')
    # replace the generated prompt in a-1 with real response text
    control_a1_path = tmp_trestle_dir / ssp_name / 'a-1.md'
    test_utils.insert_text_in_file(control_a1_path, const.SSP_ADD_IMPLEMENTATION_PREFIX, 'Text with prompt removed')
    test_utils.delete_line_in_file(control_a1_path, const.SSP_ADD_IMPLEMENTATION_PREFIX)
    # regenerate over the edited markdown - user edits must survive
    assert ssp_cmd._run(args) == 0
    # confirm the added text is still there
    assert test_utils.confirm_text_in_file(control_path, 'control test-1', 'Hello there')
    # confirm added text in a1 is there
    assert test_utils.confirm_text_in_file(control_a1_path, '## Implementation', 'Text with prompt removed')
    # confirm prompt is not there
    assert not test_utils.confirm_text_in_file(
        control_a1_path, '## Implementation', const.SSP_ADD_IMPLEMENTATION_PREFIX)
def test_profile_resolver(tmp_trestle_dir: pathlib.Path) -> None:
    """Test the resolver."""
    test_utils.setup_for_multi_profile(tmp_trestle_dir, False, True)
    prof_a_path = ModelUtils.path_for_top_level_model(
        tmp_trestle_dir, 'test_profile_a', prof.Profile, FileContentType.JSON
    )
    # resolve profile a (which chains to other profiles) into a flat catalog
    cat = ProfileResolver.get_resolved_profile_catalog(tmp_trestle_dir, prof_a_path)
    interface = CatalogInterface(cat)
    # added part ac-1_expevid from prof a
    list1 = find_string_in_all_controls_prose(interface, 'Detailed evidence logs')
    # modify param ac-3.3_prm_2 in prof b
    list2 = find_string_in_all_controls_prose(interface, 'full and complete compliance')
    # each edit should appear in exactly one control's prose
    assert len(list1) == 1
    assert len(list2) == 1
    assert interface.get_count_of_controls_in_catalog(False) == 6
    assert interface.get_count_of_controls_in_catalog(True) == 7
    assert len(cat.controls) == 4
    assert interface.get_dependent_control_ids('ac-3') == ['ac-3.3']
    # confirm the deeply nested added subpart made it through resolution
    control = interface.get_control('a-1')
    assert control.parts[0].parts[0].id == 'a-1_deep'
    assert control.parts[0].parts[0].prose == 'Extra added part in subpart'
def test_get_control_param_dict(tmp_trestle_dir: pathlib.Path) -> None:
    """Test getting the param dict of a control."""
    test_utils.setup_for_multi_profile(tmp_trestle_dir, False, True)
    prof_a_path = ModelUtils.path_for_top_level_model(
        tmp_trestle_dir, 'test_profile_a', prof.Profile, FileContentType.JSON)
    catalog = ProfileResolver.get_resolved_profile_catalog(
        tmp_trestle_dir, prof_a_path)
    catalog_interface = CatalogInterface(catalog)
    control = catalog_interface.get_control('ac-1')
    param_dict = ControlIOReader.get_control_param_dict(control, False)
    # confirm profile value is used
    assert ControlIOReader.param_values_as_str(
        param_dict['ac-1_prm_1']) == 'all alert personnel'
    # confirm original param label is used since no value was assigned
    assert ControlIOReader.param_to_str(param_dict['ac-1_prm_7'],
                                        ParameterRep.VALUE_OR_LABEL_OR_CHOICES
                                        ) == 'organization-defined events'
    # strip the value from the first param and give it a choice selection instead
    param = control.params[0]
    param.values = None
    param.select = common.ParameterSelection(
        how_many=common.HowMany.one_or_more, choice=['choice 1', 'choice 2'])
    param_dict = ControlIOReader.get_control_param_dict(control, False)
    # with no value assigned the choices should be rendered instead
    assert ControlIOReader.param_to_str(
        param_dict['ac-1_prm_1'], ParameterRep.VALUE_OR_LABEL_OR_CHOICES) == 'choice 1, choice 2'
def _replace_modify_set_params(profile: prof.Profile, param_dict: Dict[str, Any]) -> bool:
    """
    Replace the set_params in the profile with list and values from markdown.

    Notes:
        Need to check values in the original catalogs and only create SetParameters for values that change.
        The per-parameter dicts inside param_dict are mutated (an 'id' key is added).

    Returns whether or not change was made.
    """
    changed = False
    if param_dict:
        if not profile.modify:
            profile.modify = prof.Modify()
        new_set_params: List[prof.SetParameter] = []
        # NOTE: the loop variable was previously also named param_dict, shadowing
        # the parameter it iterates over; renamed for clarity and safety
        for key, single_param_dict in param_dict.items():
            if single_param_dict:
                single_param_dict['id'] = key
                param = ModelUtils.dict_to_parameter(single_param_dict)
                new_set_params.append(
                    prof.SetParameter(param_id=key, label=param.label, values=param.values, select=param.select))
        # only mark changed if the new list actually differs from the current one
        if profile.modify.set_parameters != new_set_params:
            changed = True
            profile.modify.set_parameters = new_set_params
    return changed
def test_assemble_catalog_all(
    testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path, monkeypatch: MonkeyPatch
) -> None:
    """Test assembling all catalogs in trestle dir."""
    shutil.rmtree(pathlib.Path('dist'))
    catalogs_dir = tmp_trestle_dir / 'catalogs'
    my_names = ['mycatalog1', 'mycatalog2', 'mycatalog3']
    # seed three copies of the same split catalog under different names
    for my_name in my_names:
        test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs/mycatalog'
        shutil.copytree(test_data_source, catalogs_dir / my_name)
    # assemble with no -n name so all catalogs are assembled
    testargs = ['trestle', 'assemble', 'catalog', '-t', '-x', 'json']
    monkeypatch.setattr(sys, 'argv', testargs)
    rc = Trestle().run()
    assert rc == 0
    # Read assembled model
    for my_name in my_names:
        _, _, expected_model = ModelUtils.load_distributed(
            catalogs_dir / f'{my_name}/catalog.json', tmp_trestle_dir)
        actual_model = Catalog.oscal_read(pathlib.Path(f'dist/catalogs/{my_name}.json'))
        assert actual_model == expected_model
    testargs = ['trestle', 'assemble', 'profile', '-t', '-x', 'json']
    # Tests should pass on empty set of directories.
    monkeypatch.setattr(sys, 'argv', testargs)
    rc = Trestle().run()
    assert rc == 0
def test_get_control_and_group_info_from_catalog(tmp_trestle_dir: pathlib.Path) -> None:
    """Test get all groups from the catalog."""
    test_utils.setup_for_multi_profile(tmp_trestle_dir, False, True)
    prof_a_path = ModelUtils.path_for_top_level_model(
        tmp_trestle_dir, 'test_profile_a', prof.Profile, FileContentType.JSON
    )
    catalog = ProfileResolver.get_resolved_profile_catalog(tmp_trestle_dir, prof_a_path)
    cat_interface = CatalogInterface(catalog)
    # without recursion only top-level controls are returned
    all_groups_top = cat_interface.get_all_controls_from_catalog(recurse=False)
    assert len(list(all_groups_top)) == 6
    # recursion adds the nested control
    all_groups_rec = cat_interface.get_all_controls_from_catalog(recurse=True)
    assert len(list(all_groups_rec)) == 7
    all_group_ids = cat_interface.get_group_ids()
    assert len(all_group_ids) == 1
    # look up the label of a nested statement part by its id
    statement_label, part = cat_interface.get_statement_label_if_exists('ac-1', 'ac-1_smt.c.2')
    assert statement_label == '2.'
    assert part.id == 'ac-1_smt.c.2'
    cat_path = cat_interface._get_control_path('ac-2')
    assert cat_path[0] == 'ac'
    assert len(cat_path) == 1
def test_merge_everything_into_catalog_with_hidden_files_in_folders( testdata_dir, tmp_trestle_dir): """Test trestle merge -e 'catalog.*' when metadata and catalog are split and hidden files are present.""" # Assume we are running a command like below # trestle merge -e catalog.* content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) # Change directory to mycatalog_dir os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() assert catalog_file.exists() # Read files # Create hand-crafter merge plan expected_plan: Plan = Plan() reset_destination_action = CreatePathAction(catalog_file, clear_content=True) expected_plan.add_action(reset_destination_action) _, _, merged_catalog_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir) element = Element(merged_catalog_instance) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) expected_plan.add_action(write_destination_action) delete_element_action = RemovePathAction(Path('catalog').resolve()) expected_plan.add_action(delete_element_action) test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/.DS_Store') test_utils.make_hidden_file( tmp_trestle_dir / 'catalogs/mycatalog/catalog/metadata/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/groups/.DS_Store') # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.*'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' 
assert generated_plan == expected_plan
def __init__(self, root_dir: pathlib.Path, model_type: Type[OscalBaseModel], name: str) -> None:
    """Initialize repository OSCAL model object."""
    if not file_utils.is_valid_project_root(root_dir):
        raise TrestleError(f'Provided root directory {str(root_dir)} is not a valid Trestle root directory.')
    self._root_dir = root_dir
    self._model_type = model_type
    self._model_name = name
    # set model alias and dir
    self.model_alias = classname_to_alias(self._model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(self.model_alias) is None:
        raise TrestleError(f'Given model {self.model_alias} is not a top level model.')
    plural_path = ModelUtils.model_type_to_model_dir(self.model_alias)
    self.model_dir = self._root_dir / plural_path / self._model_name
    if not self.model_dir.exists() or not self.model_dir.is_dir():
        raise TrestleError(f'Model dir {self._model_name} does not exist.')
    # determine json/yaml content type from whatever model file is present on disk
    file_content_type = FileContentType.path_to_content_type(self.model_dir / self.model_alias)
    if file_content_type == FileContentType.UNKNOWN:
        raise TrestleError(f'Model file for model {self._model_name} does not exist.')
    self.file_content_type = file_content_type
    # full path to the model file including the discovered extension
    filepath = pathlib.Path(
        self.model_dir,
        self.model_alias + FileContentType.path_to_file_extension(self.model_dir / self.model_alias)
    )
    self.filepath = filepath
def add_from_args(self, args: argparse.Namespace) -> int:
    """Parse args for add element to file."""
    file_path = pathlib.Path(args.file).resolve()
    # Get parent model and then load json into parent model
    parent_model, _ = ModelUtils.get_stripped_model_type(file_path, args.trestle_root)
    parent_object = parent_model.oscal_read(file_path)
    parent_element = Element(parent_object, classname_to_alias(parent_model.__name__, AliasMode.JSON))
    add_plan = Plan()
    # Do _add for each element_path specified in args
    # args.element may carry several comma-separated element paths
    element_paths: List[str] = args.element.split(',')
    for elm_path_str in element_paths:
        element_path = ElementPath(elm_path_str)
        update_action, parent_element = self.add(element_path, parent_element, args.include_optional_fields)
        add_plan.add_action(update_action)
    create_action = CreatePathAction(file_path, True)
    # this will output json or yaml based on type of input file
    write_action = WriteFileAction(file_path, parent_element, FileContentType.to_content_type(file_path.suffix))
    # actions are queued then executed atomically via the plan
    add_plan.add_action(create_action)
    add_plan.add_action(write_action)
    add_plan.execute()
    return CmdReturnCodes.SUCCESS.value
def delete_model(self, model_type: Type[OscalBaseModel], name: str) -> bool:
    """Delete an OSCAL model from repository."""
    logger.debug(f'Deleting model {name} of type {model_type.__name__}.')
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    desired_model_dir = self._root_dir / plural_path / name
    if not desired_model_dir.exists() or not desired_model_dir.is_dir():
        raise TrestleError(f'Model {name} does not exist.')
    # remove the whole model directory including any split pieces
    shutil.rmtree(desired_model_dir)
    # remove model from dist directory if it exists
    dist_model_dir = self._root_dir / const.TRESTLE_DIST_DIR / plural_path
    file_content_type = FileContentType.path_to_content_type(dist_model_dir / name)
    if file_content_type != FileContentType.UNKNOWN:
        # UNKNOWN here means no dist copy exists, so only delete when known
        file_path = pathlib.Path(
            dist_model_dir, name + FileContentType.path_to_file_extension(dist_model_dir / name)
        )
        logger.debug(f'Deleting model {name} from dist directory.')
        os.remove(file_path)
    logger.debug(f'Model {name} deleted successfully.')
    return True
def import_model(self, model: OscalBaseModel, name: str, content_type='json') -> ManagedOSCAL:
    """Import OSCAL object into trestle repository.

    Args:
        model: OSCAL model instance to write into the repository.
        name: Name the model will be stored under.
        content_type: File extension for the stored model ('json' or 'yaml').

    Returns:
        A ManagedOSCAL wrapper for the imported model.

    Raises:
        TrestleError: if the model is not top level, the destination exists,
            or validation of the imported file fails (the import is rolled back).
    """
    logger.debug(f'Importing model {name} of type {model.__class__.__name__}.')
    model_alias = classname_to_alias(model.__class__.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')
    # Work out output directory and file
    plural_path = ModelUtils.model_type_to_model_dir(model_alias)
    desired_model_dir = self._root_dir / plural_path
    desired_model_path = desired_model_dir / name / (model_alias + '.' + content_type)
    desired_model_path = desired_model_path.resolve()
    if desired_model_path.exists():
        raise TrestleError(f'OSCAL file to be created here: {desired_model_path} exists.')
    content_type = FileContentType.to_content_type(pathlib.Path(desired_model_path).suffix)
    # Prepare actions
    top_element = Element(model)
    create_action = CreatePathAction(desired_model_path, True)
    write_action = WriteFileAction(desired_model_path, top_element, content_type)
    # create a plan to create the directory and imported file.
    import_plan = Plan()
    import_plan.add_action(create_action)
    import_plan.add_action(write_action)
    import_plan.execute()
    # Validate the imported file, rollback if unsuccessful
    success = False
    errmsg = ''
    try:
        success = self.validate_model(model.__class__, name)
        if not success:
            errmsg = f'Validation of model {name} did not pass'
            logger.error(errmsg)
    except Exception as err:
        # bug fix: assign errmsg before logging it; the original logged the
        # still-empty errmsg first and only then built the real message
        errmsg = f'Import of model {name} failed. Validation failed with error: {err}'
        logger.error(errmsg)
    if not success:
        # rollback in case of validation error or failure
        logger.debug(f'Rolling back import of model {name} to {desired_model_path}')
        try:
            import_plan.rollback()
        except TrestleError as err:
            logger.error(f'Failed to rollback: {err}. Remove {desired_model_path} to resolve state.')
        else:
            logger.debug(f'Successful rollback of import to {desired_model_path}')
        # raise trestle error
        raise TrestleError(errmsg)
    # all well; model was imported and validated successfully
    logger.debug(f'Model {name} of type {model.__class__.__name__} imported successfully.')
    return ManagedOSCAL(self._root_dir, model.__class__, name)
def list_models(self, model_type: Type[OscalBaseModel]) -> List[str]:
    """List models of a given type in trestle repository."""
    logger.debug(f'Listing models of type {model_type.__name__}.')
    # reject anything that is not a recognized top level model type
    model_alias = classname_to_alias(model_type.__name__, AliasMode.JSON)
    if parser.to_full_model_name(model_alias) is None:
        raise TrestleError(f'Given model {model_alias} is not a top level model.')
    # delegate the directory scan and return the names directly
    return ModelUtils.get_models_of_type(model_alias, self._root_dir)
def test_ssp_generate_tutorial(tmp_trestle_dir: pathlib.Path) -> None:
    """Test the ssp generator with the nist tutorial catalog and profile."""
    # install the tutorial catalog and profile into the trestle workspace
    catalog = cat.Catalog.oscal_read(test_utils.JSON_TEST_DATA_PATH / 'nist_tutorial_catalog.json')
    ModelUtils.save_top_level_model(catalog, tmp_trestle_dir, 'nist_tutorial_catalog', FileContentType.JSON)
    profile = prof.Profile.oscal_read(test_utils.JSON_TEST_DATA_PATH / 'nist_tutorial_profile.json')
    ModelUtils.save_top_level_model(profile, tmp_trestle_dir, 'nist_tutorial_profile', FileContentType.JSON)
    # generate ssp markdown from the profile
    ssp_gen = SSPGenerate()
    args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        profile='nist_tutorial_profile',
        output='ssp_md',
        sections=None,
        overwrite_header_values=False,
        verbose=0,
        yaml_header=None,
        allowed_sections=None)
    assert ssp_gen._run(args) == 0
    # assemble the markdown back into an ssp json model
    ssp_assem = SSPAssemble()
    args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        output='ssp_json',
        markdown='ssp_md',
        verbose=0,
        name=None,
        version=None,
        regenerate=False)
    assert ssp_assem._run(args) == 0
    json_ssp: ossp.SystemSecurityPlan
    json_ssp, _ = ModelUtils.load_top_level_model(tmp_trestle_dir, 'ssp_json', ossp.SystemSecurityPlan)
    # the assembled ssp should contain the default 'This System' component
    comp_def = json_ssp.system_implementation.components[0]
    assert comp_def.title == 'This System'
    assert comp_def.status.state == ossp.State1.under_development
    # one implemented requirement per control selected by the profile
    imp_reqs: List[
        ossp.
        ImplementedRequirement] = json_ssp.control_implementation.implemented_requirements
    assert len(imp_reqs) == 2
    assert imp_reqs[0].control_id == 's1.1.1'
    assert imp_reqs[1].control_id == 's2.1.2'
def test_stripped_model(tmp_path: pathlib.Path, keep_cwd: pathlib.Path,
                        sample_catalog_minimal: Catalog, monkeypatch: MonkeyPatch) -> None:
    """Test CreateCmd creating element for stripped model."""
    content_type = FileContentType.JSON
    catalog_def_dir, catalog_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path, content_type, sample_catalog_minimal, test_utils.CATALOGS_DIR)
    os.chdir(catalog_def_dir)
    # split the metadata out of the catalog
    testargs = [
        'trestle', 'split', '-f', 'catalog.json', '-e', 'catalog.metadata'
    ]
    monkeypatch.setattr(sys, 'argv', testargs)
    assert Trestle().run() == 0
    # Now that the metadata has been split, add of catalog.metadata.roles will error,
    # but add of catalog.back-matter will pass
    testargs = [
        'trestle', 'create', '-f', 'catalog.json', '-e', 'catalog.metadata.roles'
    ]
    monkeypatch.setattr(sys, 'argv', testargs)
    assert Trestle().run() == 1
    testargs = [
        'trestle', 'create', '-f', 'catalog.json', '-e', 'catalog.back-matter'
    ]
    # build the expected result by adding back_matter to the current stripped model
    current_model, _ = ModelUtils.get_stripped_model_type(
        catalog_def_dir, tmp_path)
    current_catalog = current_model.oscal_read(pathlib.Path('catalog.json'))
    current_catalog.back_matter = BackMatter()
    expected_catalog = current_catalog
    monkeypatch.setattr(sys, 'argv', testargs)
    assert Trestle().run() == 0
    # re-read the catalog from disk and confirm it matches the expected one
    actual_model, _ = ModelUtils.get_stripped_model_type(
        catalog_def_dir, tmp_path)
    actual_catalog = actual_model.oscal_read(pathlib.Path('catalog.json'))
    assert expected_catalog == actual_catalog
def test_add_props_before_after_ok(tmp_trestle_dir: pathlib.Path) -> None:
    """
    Test for property addition behavior with before or after.

    Properties added with before or after will default to starting or ending.
    """
    test_utils.setup_for_multi_profile(tmp_trestle_dir, False, True)
    profile_path = ModelUtils.path_for_top_level_model(
        tmp_trestle_dir, 'test_profile_g', prof.Profile, FileContentType.JSON)
    # resolution must complete without raising despite before/after placements
    _ = ProfileResolver.get_resolved_profile_catalog(tmp_trestle_dir, profile_path)
def model_is_valid(self, model: OscalBaseModel) -> bool:
    """
    Test if the model is valid and contains no duplicate uuids.

    Args:
        model: An Oscal model that can be passed to the validator.

    Returns:
        True (valid) if the model does not contain duplicate uuid's.
    """
    # valid here means simply: no two 'uuid' fields anywhere share a value
    return ModelUtils.has_no_duplicate_values_by_name(model, 'uuid')
def test_get_models_of_type(tmp_trestle_dir) -> None:
    """Test fs.get_models_of_type()."""
    create_sample_catalog_project(tmp_trestle_dir)
    catalogs_dir = tmp_trestle_dir.resolve() / 'catalogs'
    components_dir = tmp_trestle_dir.resolve() / 'component-definitions'
    # mycatalog is already there
    (catalogs_dir / 'mycatalog2').mkdir()
    # hidden files must not be counted as models
    (catalogs_dir / '.myfile').touch()
    (components_dir / 'my_component').mkdir()
    models = ModelUtils.get_models_of_type('catalog', tmp_trestle_dir)
    assert len(models) == 2
    assert 'mycatalog' in models
    assert 'mycatalog2' in models
    # get_all_models returns (type, name) pairs across all model dirs
    all_models = ModelUtils.get_all_models(tmp_trestle_dir)
    assert len(all_models) == 3
    assert ('catalog', 'mycatalog') in all_models
    assert ('catalog', 'mycatalog2') in all_models
    assert ('component-definition', 'my_component') in all_models
    # unknown model type must raise
    with pytest.raises(TrestleError):
        ModelUtils.get_models_of_type('foo', tmp_trestle_dir)
def test_ssp_assemble_header_metadata(tmp_trestle_dir: pathlib.Path) -> None:
    """Test parsing of metadata from yaml header."""
    catalog = test_utils.generate_complex_catalog()
    ModelUtils.save_top_level_model(catalog, tmp_trestle_dir, 'complex_cat', FileContentType.JSON)
    prof_name = 'test_profile_c'
    ssp_name = 'my_ssp'
    profile = prof.Profile.oscal_read(test_utils.JSON_TEST_DATA_PATH / f'{prof_name}.json')
    ModelUtils.save_top_level_model(profile, tmp_trestle_dir, prof_name, FileContentType.JSON)
    # yaml header carrying metadata (including roles) to fold into the ssp
    header_path = test_utils.YAML_TEST_DATA_PATH / 'header_with_metadata.yaml'
    args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        profile=prof_name,
        output=ssp_name,
        verbose=0,
        sections=None,
        yaml_header=header_path,
        overwrite_header_values=False,
        allowed_sections=None)
    # generate the markdown with header content
    ssp_cmd = SSPGenerate()
    assert ssp_cmd._run(args) == 0
    # create ssp from the markdown
    ssp_assemble = SSPAssemble()
    args = argparse.Namespace(
        trestle_root=tmp_trestle_dir,
        markdown=ssp_name,
        output=ssp_name,
        verbose=0,
        name=None,
        version=None,
        regenerate=False)
    assert ssp_assemble._run(args) == 0
    # read the assembled ssp and confirm roles are in metadata
    ssp, _ = ModelUtils.load_top_level_model(
        tmp_trestle_dir, ssp_name, ossp.SystemSecurityPlan, FileContentType.JSON)
    assert len(ssp.metadata.roles) == 2
def test_get_root_model() -> None:
    """Test looking for the root model of a trestle oscal module."""
    # non-oscal module names must be rejected
    with pytest.raises(err.TrestleError):
        ModelUtils.get_root_model('invalid')
    with pytest.raises(err.TrestleError):
        ModelUtils.get_root_model('pydantic')
    # every top level alias should map back from its module to (type, alias)
    expected_pairs = {
        const.MODEL_TYPE_CATALOG: catalog.Catalog,
        const.MODEL_TYPE_PROFILE: profile.Profile,
        const.MODEL_TYPE_COMPDEF: component.ComponentDefinition,
        const.MODEL_TYPE_SSP: ssp.SystemSecurityPlan,
        const.MODEL_TYPE_A_PLAN: assessment_plan.AssessmentPlan,
        const.MODEL_TYPE_A_RESULT: assessment_results.AssessmentResults,
        const.MODEL_TYPE_POAM: poam.PlanOfActionAndMilestones
    }
    for alias, expected_type in expected_pairs.items():
        found_type, found_alias = ModelUtils.get_root_model(expected_type.__module__)
        assert found_type == expected_type
        assert found_alias == alias
def test_catalog_assemble_version(sample_catalog_rich_controls: cat.Catalog,
                                  tmp_trestle_dir: pathlib.Path) -> None:
    """Test catalog assemble version."""
    cat_name = 'my_cat'
    md_name = 'my_md'
    new_version = '1.2.3'
    assembled_cat_name = 'my_assembled_cat'
    catalog_dir = tmp_trestle_dir / f'catalogs/{cat_name}'
    catalog_dir.mkdir(parents=True, exist_ok=True)
    catalog_path = catalog_dir / 'catalog.json'
    sample_catalog_rich_controls.oscal_write(catalog_path)
    markdown_path = tmp_trestle_dir / md_name
    # generate markdown then assemble it back with an explicit version
    catalog_generate = CatalogGenerate()
    catalog_generate.generate_markdown(tmp_trestle_dir, catalog_path, markdown_path, {}, False)
    CatalogAssemble.assemble_catalog(
        tmp_trestle_dir, md_name, assembled_cat_name, cat_name, False, False, new_version)
    assembled_cat, assembled_cat_path = ModelUtils.load_top_level_model(
        tmp_trestle_dir, assembled_cat_name, cat.Catalog)
    assert assembled_cat.metadata.version.__root__ == new_version
    # assembly should also refresh last-modified to roughly now
    assert ModelUtils.model_age(
        assembled_cat) < test_utils.NEW_MODEL_AGE_SECONDS
    creation_time = assembled_cat_path.stat().st_mtime
    # assemble same way again and confirm no new write
    CatalogAssemble.assemble_catalog(
        tmp_trestle_dir, md_name, assembled_cat_name, assembled_cat_name, False, False, new_version)
    assert creation_time == assembled_cat_path.stat().st_mtime
    # change version and confirm write
    CatalogAssemble.assemble_catalog(
        tmp_trestle_dir, md_name, assembled_cat_name, assembled_cat_name, False, False, 'xx')
    assert creation_time < assembled_cat_path.stat().st_mtime
def test_validations_on_dict() -> None:
    """Test regen of uuid in dict."""
    outer_uuid = str(uuid4())
    inner_uuid = str(uuid4())
    source = {
        'uuid': outer_uuid,
        'ref': outer_uuid,
        'my_inner_dict': {
            'uuid': inner_uuid, 'ref': inner_uuid
        }
    }
    regenerated, lut = ModelUtils._regenerate_uuids_in_place(source, {})
    # both nested uuids must have been replaced, and both recorded in the lut
    assert source['uuid'] != regenerated['uuid']
    assert source['my_inner_dict']['uuid'] != regenerated['my_inner_dict']['uuid']
    assert len(lut) == 2
    # reference fixup should point each 'ref' at its regenerated uuid
    fixed, n_fixed = ModelUtils._update_new_uuid_refs(regenerated, lut)
    assert fixed['uuid'] == fixed['ref']
    assert fixed['my_inner_dict']['uuid'] == fixed['my_inner_dict']['ref']
    assert n_fixed == 2