def test_to_file_extension() -> None:
    """Verify the extension returned for each readable content type and the error path for bad input."""
    expected_extensions = {
        FileContentType.JSON: '.json',
        FileContentType.YAML: '.yaml',
    }
    for ctype, extension in expected_extensions.items():
        assert FileContentType.to_file_extension(ctype) == extension
    # an invalid enum value must raise rather than return a bogus extension
    with pytest.raises(TrestleError):
        FileContentType.to_file_extension(-1)
def replicate_object(cls, model_alias: str, args: argparse.Namespace) -> int:
    """
    Core replicate routine invoked by subcommands.

    Args:
        model_alias: Name of the top level model in the trestle directory.
        args: CLI arguments

    Returns:
        A return code that can be used as standard posix codes. 0 is success.

    Raises:
        TrestleError: if the trestle root is invalid, the input file is missing,
            or the replica destination already exists.
    """
    logger.debug('Entering replicate_object.')

    # 1 Bad working directory if not running from current working directory
    trestle_root = args.trestle_root  # trestle root is set via command line in args. Default is cwd.
    if not trestle_root or not file_utils.is_valid_project_root(trestle_root):
        raise TrestleError(f'Given directory: {trestle_root} is not a trestle project.')

    plural_path = ModelUtils.model_type_to_model_dir(model_alias)

    # 2 Check that input file given exists.
    input_file_stem = trestle_root / plural_path / args.name / model_alias
    # infer json vs yaml from whichever file actually exists at the stem
    content_type = FileContentType.path_to_content_type(input_file_stem)
    if content_type == FileContentType.UNKNOWN:
        raise TrestleError(
            f'Input file {args.name} has no json or yaml file at expected location {input_file_stem}.'
        )
    input_file = input_file_stem.with_suffix(FileContentType.to_file_extension(content_type))

    # 3 Distributed load from file — merges any split parts into one in-memory model
    _, model_alias, model_instance = ModelUtils.load_distributed(input_file, trestle_root)

    rep_model_path = trestle_root / plural_path / args.output / (
        model_alias + FileContentType.to_file_extension(content_type)
    )
    # refuse to clobber an existing model at the destination
    if rep_model_path.exists():
        raise TrestleError(f'OSCAL file to be replicated here: {rep_model_path} exists.')

    if args.regenerate:
        logger.debug(f'regenerating uuids for model {input_file}')
        model_instance, uuid_lut, n_refs_updated = ModelUtils.regenerate_uuids(model_instance)
        logger.debug(f'{len(uuid_lut)} uuids generated and {n_refs_updated} references updated')

    # 4 Prepare actions and plan
    top_element = Element(model_instance)
    create_action = CreatePathAction(rep_model_path, True)
    write_action = WriteFileAction(rep_model_path, top_element, content_type)

    # create a plan to create the directory and imported file.
    replicate_plan = Plan()
    replicate_plan.add_action(create_action)
    replicate_plan.add_action(write_action)
    replicate_plan.execute()

    return CmdReturnCodes.SUCCESS.value
def to_file_path(self, content_type: FileContentType = None, root_dir: str = '') -> pathlib.Path:
    """Build the relative file or directory path for this element path.

    When content_type is None a directory path is returned; otherwise the
    matching file extension is appended, producing a file path.
    """
    parts = self.get()

    # a trailing wildcard has no directory or file of its own
    if parts[-1] == ElementPath.WILDCARD:
        parts = parts[:-1]

    # optionally substitute the model root for the first segment
    if root_dir != '':
        parts[0] = root_dir

    joined = '/'.join(parts)

    # a file (rather than directory) path carries the content-type extension
    if content_type is not None:
        joined += FileContentType.to_file_extension(content_type)

    return pathlib.Path(f'./{joined}')
def load_top_level_model(
    trestle_root: pathlib.Path,
    model_name: str,
    model_class: Type[TopLevelOscalModel],
    file_content_type: Optional[FileContentType] = None
) -> Tuple[Union[OscalBaseModel, List[OscalBaseModel], Dict[str, OscalBaseModel]], pathlib.Path]:
    """Load a model by name and model class and infer file content type if not specified.

    If you need to load an existing model but its content type may not be known, use this method.
    But the file content type should be specified if it is somehow known.

    Returns:
        A tuple of the loaded model and the full path of the file it was loaded from.

    Raises:
        TrestleError: if neither a json nor a yaml file exists at the expected location.
    """
    root_model_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model_class)
    # probe the file system to determine whether the model is stored as json or yaml
    if file_content_type is None:
        file_content_type = FileContentType.path_to_content_type(root_model_path)
    if not FileContentType.is_readable_file(file_content_type):
        raise TrestleError(f'Unable to load model {model_name} without specifying json or yaml.')
    full_model_path = root_model_path.with_suffix(FileContentType.to_file_extension(file_content_type))
    # distributed load re-assembles any split (decomposed) parts into one in-memory model
    _, _, model = ModelUtils.load_distributed(full_model_path, trestle_root)
    return model, full_model_path
def test_target_dups(tmp_dir):
    """Test duplicate-uuid validation: a clean model passes, a model with duplicate uuids fails."""
    content_type = FileContentType.YAML
    models_dir_name = test_utils.TARGET_DEFS_DIR
    model_ref = ostarget.TargetDefinition

    test_utils.ensure_trestle_config_dir(tmp_dir)

    # lay out the model file at the location the validate command expects
    file_ext = FileContentType.to_file_extension(content_type)
    models_full_path = tmp_dir / models_dir_name / 'my_test_model'
    model_alias = utils.classname_to_alias(model_ref.__name__, 'json')
    model_def_file = models_full_path / f'{model_alias}{file_ext}'
    fs.ensure_directory(models_full_path)

    # a valid file with no duplicate uuids must validate cleanly (SystemExit with code None)
    shutil.copyfile('tests/data/yaml/good_target.yaml', model_def_file)
    testcmd = f'trestle validate -f {model_def_file} -m duplicates -i uuid'
    with patch.object(sys, 'argv', testcmd.split()):
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            cli.run()
    assert pytest_wrapped_e.type == SystemExit
    assert pytest_wrapped_e.value.code is None

    # overwriting with a file containing duplicate uuids must raise TrestleValidationError
    shutil.copyfile('tests/data/yaml/bad_target_dup_uuid.yaml', model_def_file)
    testcmd = f'trestle validate -f {model_def_file} -m duplicates -i uuid'
    with patch.object(sys, 'argv', testcmd.split()):
        with pytest.raises(TrestleValidationError) as pytest_wrapped_e:
            cli.run()
    assert pytest_wrapped_e.type == TrestleValidationError
def test_merge_everything_into_catalog_with_hidden_files_in_folders( testdata_dir, tmp_trestle_dir): """Test trestle merge -e 'catalog.*' when metadata and catalog are split and hidden files are present.""" # Assume we are running a command like below # trestle merge -e catalog.* content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) # Change directory to mycatalog_dir os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() assert catalog_file.exists() # Read files # Create hand-crafter merge plan expected_plan: Plan = Plan() reset_destination_action = CreatePathAction(catalog_file, clear_content=True) expected_plan.add_action(reset_destination_action) _, _, merged_catalog_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir) element = Element(merged_catalog_instance) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) expected_plan.add_action(write_destination_action) delete_element_action = RemovePathAction(Path('catalog').resolve()) expected_plan.add_action(delete_element_action) test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/.DS_Store') test_utils.make_hidden_file( tmp_trestle_dir / 'catalogs/mycatalog/catalog/metadata/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/groups/.DS_Store') # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.*'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' 
assert generated_plan == expected_plan
def to_model_file_name(model_obj: OscalBaseModel, file_prefix: str, content_type: FileContentType) -> str:
    """Return the file name for the item.

    The name is built as '<prefix><separator><model-alias><extension>'.
    """
    extension = FileContentType.to_file_extension(content_type)
    alias = utils.classname_to_alias(type(model_obj).__name__, 'json')
    return f'{file_prefix}{const.IDX_SEP}{alias}{extension}'
def to_root_path(self, content_type: FileContentType = None) -> pathlib.Path:
    """Build the file path for the element root.

    A directory path is produced when content_type is None, a file path otherwise.
    """
    root_str = f'./{self.get_first()}'
    if content_type is not None:
        root_str += FileContentType.to_file_extension(content_type)
    return pathlib.Path(root_str)
def path_for_top_level_model(
    trestle_root: pathlib.Path,
    model_name: str,
    model_class: Type[TopLevelOscalModel],
    file_content_type: FileContentType
) -> pathlib.Path:
    """
    Find the full path of a model given its name, model type and file content type.

    This does not inspect the file system or confirm the needed path and file exists.
    """
    stem = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model_class)
    extension = FileContentType.to_file_extension(file_content_type)
    return stem.with_suffix(extension)
def save_top_level_model(
    model: TopLevelOscalModel, trestle_root: pathlib.Path, model_name: str, file_content_type: FileContentType
) -> None:
    """Save a model by name and infer model type by inspection.

    You don't need to specify the model type (catalog, profile, etc.) but you must specify
    the file content type. If the model directory does not exist, it is created.
    """
    # NOTE(review): the model instance itself is passed here where sibling helpers pass the
    # model class — presumably _root_path_for_top_level_model accepts either; confirm.
    root_model_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model)
    full_model_path = root_model_path.with_suffix(FileContentType.to_file_extension(file_content_type))
    # create the model directory on demand so saving a brand-new model just works
    if not full_model_path.parent.exists():
        full_model_path.parent.mkdir(parents=True, exist_ok=True)
    model.oscal_write(full_model_path)
def prepare_trestle_project_dir(
    tmp_dir, content_type: FileContentType, model_obj: OscalBaseModel, models_dir_name: str
):
    """Prepare a temp directory with an example OSCAL model.

    Returns the model directory and the path of the written model file.
    """
    ensure_trestle_config_dir(tmp_dir)

    alias = utils.classname_to_alias(model_obj.__class__.__name__, 'json')
    extension = FileContentType.to_file_extension(content_type)
    model_dir = tmp_dir / models_dir_name / 'my_test_model'
    model_file = model_dir / f'{alias}{extension}'

    fs.ensure_directory(model_dir)
    model_obj.oscal_write(model_file)

    return model_dir, model_file
def prepare_trestle_project_dir(
    repo_dir: pathlib.Path, content_type: FileContentType, model_obj: OscalBaseModel, models_dir_name: str
):
    """Prepare a temp directory with an example OSCAL model.

    Returns the model directory and the path of the written model file.
    """
    ensure_trestle_config_dir(repo_dir)

    alias = str_utils.classname_to_alias(model_obj.__class__.__name__, AliasMode.JSON)
    extension = FileContentType.to_file_extension(content_type)
    model_dir = repo_dir / models_dir_name / 'my_test_model'
    model_file = model_dir / f'{alias}{extension}'

    model_dir.mkdir(exist_ok=True, parents=True)
    model_obj.oscal_write(model_file)

    return model_dir, model_file
def find_last_file_in_path(self, content_type: FileContentType, model_dir: pathlib.Path) -> pathlib.Path:
    """Find the last (nearest) existing file in the element path leading to this element.

    Walks the path parts under model_dir (the top level dir for this model, e.g. catalogs/mycat)
    and returns the deepest file that exists, or None if none of the parts exist on disk.
    """
    extension = FileContentType.to_file_extension(content_type)
    current_dir = model_dir
    deepest_found: pathlib.Path = None
    for part in self._path:
        # a wildcard never corresponds to a concrete file
        if part == '*':
            break
        candidate = (current_dir / part).with_suffix(extension)
        if not candidate.exists():
            break
        current_dir = current_dir / part
        deepest_found = candidate
    return deepest_found
def full_path_for_top_level_model(
    trestle_root: pathlib.Path,
    model_name: str,
    model_class: Type[TopLevelOscalModel],
) -> pathlib.Path:
    """
    Find the full path of an existing model given its name and model type but no file content type.

    Use this method when you need the path of a model but you don't know the file content type.
    This method should only be called if the model needs to exist already in the trestle directory.
    If you do know the file content type, use path_for_top_level_model instead.

    Raises:
        TrestleError: if no json or yaml file exists at the expected location.
    """
    root_model_path = ModelUtils._root_path_for_top_level_model(trestle_root, model_name, model_class)
    # probe the file system to see whether the model was stored as json or yaml
    file_content_type = FileContentType.path_to_content_type(root_model_path)
    if not FileContentType.is_readable_file(file_content_type):
        raise TrestleError(f'Unable to load model {model_name} as json or yaml.')
    return root_model_path.with_suffix(FileContentType.to_file_extension(file_content_type))
def test_split_multi_level_dict(tmp_path: pathlib.Path, sample_target_def: ostarget.TargetDefinition) -> None:
    """Test for split_model method with a multi-level wildcard element path over a dict collection."""
    # Assume we are running a command like below
    # trestle split -f target.yaml -e target-definition.targets.*.target-control-implementations.*
    content_type = FileContentType.YAML

    # prepare trestle project dir with the file
    target_def_dir, target_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path, content_type, sample_target_def, test_utils.TARGET_DEFS_DIR
    )

    file_ext = FileContentType.to_file_extension(content_type)

    # read the model from file
    target_def: ostarget.TargetDefinition = ostarget.TargetDefinition.oscal_read(target_def_file)
    element = Element(target_def)
    element_args = ['target-definition.targets.*.target-control-implementations.*']
    element_paths = test_utils.prepare_element_paths(target_def_dir, element_args)

    expected_plan = Plan()

    # extract values
    targets: dict = element.get_at(element_paths[0])
    targets_dir = target_def_dir / element_paths[0].to_file_path()

    # split every target
    for key in targets:
        # individual target dir
        target: ostarget.Target = targets[key]
        target_element = Element(targets[key])
        model_type = utils.classname_to_alias(type(target).__name__, 'json')
        dir_prefix = key
        target_dir_name = f'{dir_prefix}{const.IDX_SEP}{model_type}'
        target_file = targets_dir / f'{target_dir_name}{file_ext}'

        # target control impl dir for the target
        target_ctrl_impls: dict = target_element.get_at(element_paths[1])
        targets_ctrl_dir = targets_dir / element_paths[1].to_file_path(root_dir=target_dir_name)

        # one numbered file per control implementation, in iteration order
        for i, target_ctrl_impl in enumerate(target_ctrl_impls):
            model_type = utils.classname_to_alias(type(target_ctrl_impl).__name__, 'json')
            file_prefix = str(i).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
            file_name = f'{file_prefix}{const.IDX_SEP}{model_type}{file_ext}'
            file_path = targets_ctrl_dir / file_name
            expected_plan.add_action(CreatePathAction(file_path))
            expected_plan.add_action(WriteFileAction(file_path, Element(target_ctrl_impl), content_type))

        # write stripped target model
        stripped_target = target.stripped_instance(stripped_fields_aliases=[element_paths[1].get_element_name()])
        expected_plan.add_action(CreatePathAction(target_file))
        expected_plan.add_action(WriteFileAction(target_file, Element(stripped_target), content_type))

    # finally the root model is rewritten with the split field stripped out
    root_file = target_def_dir / f'target-definition{file_ext}'
    remaining_root = element.get().stripped_instance(stripped_fields_aliases=[element_paths[0].get_element_name()])
    expected_plan.add_action(CreatePathAction(root_file, True))
    expected_plan.add_action(WriteFileAction(root_file, Element(remaining_root), content_type))

    split_plan = SplitCmd.split_model(target_def, element_paths, target_def_dir, content_type)
    assert expected_plan == split_plan
def test_merge_plan_simple_case(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.back-matter'."""
    # Assume we are running a command like below
    # trestle merge -e catalog.back-matter
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}').resolve()
    catalog_dir = Path('catalog/')
    back_matter_file = (catalog_dir / f'back-matter{fext}').resolve()
    assert catalog_file.exists()
    assert back_matter_file.exists()

    # The destination file/model needs to be loaded in a stripped model
    stripped_catalog_type, _ = ModelUtils.get_stripped_model_type(catalog_file.resolve(), tmp_trestle_dir)
    stripped_catalog = stripped_catalog_type.oscal_read(catalog_file)

    # Back-matter model needs to be complete and if it is decomposed, needs to be merged recursively first
    back_matter = common.BackMatter.oscal_read(back_matter_file)

    # Back-matter needs to be inserted in a stripped Catalog that does NOT exclude the back-matter fields
    merged_catalog_type, merged_catalog_alias = ModelUtils.get_stripped_model_type(
        catalog_file.resolve(), tmp_trestle_dir, aliases_not_to_be_stripped=['back-matter']
    )
    merged_dict = stripped_catalog.__dict__
    merged_dict['back-matter'] = back_matter
    merged_catalog = merged_catalog_type(**merged_dict)

    element = Element(merged_catalog, merged_catalog_alias)

    # Create hand-crafted merge plan: reset destination, write merged model, remove split file
    reset_destination_action = CreatePathAction(catalog_file, clear_content=True)
    write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type)
    delete_element_action = RemovePathAction(back_matter_file)

    expected_plan: Plan = Plan()
    expected_plan.add_action(reset_destination_action)
    expected_plan.add_action(write_destination_action)
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.back-matter'), tmp_trestle_dir)

    # Assert the generated plan matches the expected plan
    assert generated_plan == expected_plan
def _run(self, args: argparse.Namespace) -> int:
    """Top level import run command.

    Fetches the OSCAL file, writes it into the trestle project, validates it, and
    rolls the import back if validation fails. Returns a posix-style return code.
    """
    try:
        log.set_log_level_from_args(args)

        trestle_root = args.trestle_root
        if not file_utils.is_valid_project_root(trestle_root):
            raise TrestleRootError(f'Attempt to import from non-valid trestle project root {trestle_root}')
        input_uri = args.file
        # importing a file that already lives in this project would duplicate managed content
        if cache.FetcherFactory.in_trestle_directory(trestle_root, input_uri):
            raise TrestleError(
                f'Imported file {input_uri} cannot be from current trestle project. Use duplicate instead.'
            )
        # content type is derived from the uri's final extension
        content_type = FileContentType.to_content_type('.' + input_uri.split('.')[-1])

        fetcher = cache.FetcherFactory.get_fetcher(trestle_root, str(input_uri))

        model_read, parent_alias = fetcher.get_oscal(True)

        plural_path = ModelUtils.model_type_to_model_dir(parent_alias)

        output_name = args.output

        desired_model_dir = trestle_root / plural_path
        desired_model_path: pathlib.Path = desired_model_dir / output_name / parent_alias
        desired_model_path = desired_model_path.with_suffix(
            FileContentType.to_file_extension(content_type)
        ).resolve()

        # never clobber an existing model
        if desired_model_path.exists():
            raise TrestleError(
                f'Cannot import because file to be imported here: {desired_model_path} already exists.'
            )

        if args.regenerate:
            logger.debug(f'regenerating uuids in imported file {input_uri}')
            model_read, lut, nchanged = ModelUtils.regenerate_uuids(model_read)
            logger.debug(f'uuid lut has {len(lut.items())} entries and {nchanged} refs were updated')

        top_element = Element(model_read)
        create_action = CreatePathAction(desired_model_path, True)
        write_action = WriteFileAction(desired_model_path, top_element, content_type)

        # create a plan to create the directory and write the imported file.
        import_plan = Plan()
        import_plan.add_action(create_action)
        import_plan.add_action(write_action)
        import_plan.execute()

        # validate the freshly imported model; any failure triggers a rollback below
        args = argparse.Namespace(
            file=desired_model_path, verbose=args.verbose, trestle_root=args.trestle_root, type=None, all=None
        )
        rollback = False
        try:
            rc = validatecmd.ValidateCmd()._run(args)
            if rc > 0:
                logger.warning(f'Validation of imported file {desired_model_path} did not pass')
                rollback = True
        except TrestleError as err:
            logger.warning(f'Import of {str(input_uri)} failed with validation error: {err}')
            rollback = True

        if rollback:
            logger.debug(f'Rolling back import of {str(input_uri)} to {desired_model_path}')
            try:
                import_plan.rollback()
            except TrestleError as err:
                # rollback itself failed: surface it and tell the user how to recover manually
                raise TrestleError(
                    f'Import failed in plan rollback: {err}. Manually remove {desired_model_path} to recover.'
                )
            logger.debug(f'Successful rollback of import to {desired_model_path}')
            return CmdReturnCodes.COMMAND_ERROR.value

        return CmdReturnCodes.SUCCESS.value
    except Exception as e:  # pragma: no cover
        return handle_generic_command_exception(e, logger, 'Error while importing OSCAL file')
def test_split_multi_level_dict_plans(
    tmp_path: pathlib.Path, sample_nist_component_def: component.ComponentDefinition, keep_cwd
) -> None:
    """Test for split_model method with a multi-level wildcard element path over a list collection."""
    # Assume we are running a command like below
    # trestle split -f target.yaml -e component-definition.components.*.control-implementations.*
    content_type = FileContentType.YAML

    # prepare trestle project dir with the file
    component_def_dir, component_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path, content_type, sample_nist_component_def, test_utils.COMPONENT_DEF_DIR
    )

    file_ext = FileContentType.to_file_extension(content_type)

    # read the model from file
    component_def: component.ComponentDefinition = component.ComponentDefinition.oscal_read(component_def_file)
    element = Element(component_def)
    element_args = ['component-definition.components.*.control-implementations.*']
    element_paths = cmd_utils.parse_element_args(None, element_args, component_def_dir.relative_to(tmp_path))

    expected_plan = Plan()

    # extract values
    components: list = element.get_at(element_paths[0])
    components_dir = component_def_dir / element_paths[0].to_file_path()

    # split every component
    for index, comp_obj in enumerate(components):
        # individual component dir
        component_element = Element(comp_obj)
        model_type = str_utils.classname_to_alias(type(comp_obj).__name__, AliasMode.JSON)
        dir_prefix = str(index).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
        component_dir_name = f'{dir_prefix}{const.IDX_SEP}{model_type}'
        component_file = components_dir / f'{component_dir_name}{file_ext}'

        # control impl dir for the component
        component_ctrl_impls: list = component_element.get_at(element_paths[1])
        component_ctrl_dir = components_dir / element_paths[1].to_file_path(root_dir=component_dir_name)

        # one numbered file per control implementation, in list order
        for i, component_ctrl_impl in enumerate(component_ctrl_impls):
            model_type = str_utils.classname_to_alias(type(component_ctrl_impl).__name__, AliasMode.JSON)
            file_prefix = str(i).zfill(const.FILE_DIGIT_PREFIX_LENGTH)
            file_name = f'{file_prefix}{const.IDX_SEP}{model_type}{file_ext}'
            file_path = component_ctrl_dir / file_name
            expected_plan.add_action(CreatePathAction(file_path))
            expected_plan.add_action(WriteFileAction(file_path, Element(component_ctrl_impl), content_type))

        # write stripped component model
        stripped_target = comp_obj.stripped_instance(stripped_fields_aliases=[element_paths[1].get_element_name()])
        expected_plan.add_action(CreatePathAction(component_file))
        expected_plan.add_action(WriteFileAction(component_file, Element(stripped_target), content_type))

    # finally the root model is rewritten with the split field stripped out
    root_file = component_def_dir / f'component-definition{file_ext}'
    remaining_root = element.get().stripped_instance(stripped_fields_aliases=[element_paths[0].get_element_name()])
    expected_plan.add_action(CreatePathAction(root_file, True))
    expected_plan.add_action(WriteFileAction(root_file, Element(remaining_root), content_type))

    split_plan = SplitCmd.split_model(component_def, element_paths, component_def_dir, content_type, '', None)
    assert expected_plan == split_plan
def test_merge_plan_simple_list(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e metadata.roles'."""
    # Assume we are running a command like below
    # trestle merge -e metadata.roles
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    os.chdir(mycatalog_dir)
    catalog_dir = Path('catalog/')
    os.chdir(catalog_dir)
    metadata_dir = Path('metadata/')
    metadata_file = Path(f'metadata{fext}')
    roles_dir = metadata_dir / 'roles'

    # The destination file/model needs to be loaded in a stripped model
    stripped_metadata_type, _ = fs.get_stripped_contextual_model(metadata_file.absolute())
    stripped_metadata = stripped_metadata_type.oscal_read(metadata_file)

    # Role models need to be complete; if decomposed they would need to be merged recursively first
    roles = [
        oscatalog.Role.oscal_read(roles_dir / '00000__role.json'),
        oscatalog.Role.oscal_read(roles_dir / '00001__role.json')
    ]

    # Roles need to be inserted in a stripped Metadata that does NOT exclude the roles field
    merged_metadata_type, merged_metadata_alias = fs.get_stripped_contextual_model(
        metadata_file.absolute(), aliases_not_to_be_stripped=['roles']
    )
    merged_dict = stripped_metadata.__dict__
    merged_dict['roles'] = roles
    merged_metadata = merged_metadata_type(**merged_dict)

    element = Element(merged_metadata, merged_metadata_alias)

    # Create hand-crafted merge plan: reset destination, write merged model, remove split dir
    reset_destination_action = CreatePathAction(metadata_file.absolute(), clear_content=True)
    write_destination_action = WriteFileAction(metadata_file, element, content_type=content_type)
    delete_element_action = RemovePathAction(roles_dir.absolute())

    expected_plan: Plan = Plan()
    expected_plan.add_action(reset_destination_action)
    expected_plan.add_action(write_destination_action)
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(ElementPath('metadata.roles'))

    # Assert the generated plan matches the expected plan
    assert len(list(diff(generated_plan, expected_plan))) == 0
def test_merge_expanded_metadata_into_catalog(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.metadata' when metadata is already split."""
    # Assume we are running a command like below
    # trestle merge -e catalog.metadata
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    # Change directory to mycatalog_dir
    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}').resolve()
    catalog_dir = Path('catalog/')
    metadata_dir = catalog_dir / 'metadata'
    metadata_file = (catalog_dir / f'metadata{fext}').resolve()
    assert catalog_file.exists()
    assert metadata_dir.exists()
    assert metadata_file.exists()

    # Create hand-crafted merge plan
    expected_plan: Plan = Plan()

    reset_destination_action = CreatePathAction(catalog_file, clear_content=True)
    expected_plan.add_action(reset_destination_action)

    # load the split metadata back into a single in-memory instance
    _, _, merged_metadata_instance = ModelUtils.load_distributed(metadata_file, tmp_trestle_dir)
    # catalog type that keeps the metadata field, for re-insertion of the merged metadata
    merged_catalog_type, _ = ModelUtils.get_stripped_model_type(
        catalog_file.resolve(), tmp_trestle_dir, aliases_not_to_be_stripped=['metadata']
    )
    stripped_catalog_type, _ = ModelUtils.get_stripped_model_type(catalog_file, tmp_trestle_dir)
    stripped_catalog = stripped_catalog_type.oscal_read(catalog_file)
    merged_catalog_dict = stripped_catalog.__dict__
    merged_catalog_dict['metadata'] = merged_metadata_instance
    merged_catalog = merged_catalog_type(**merged_catalog_dict)

    element = Element(merged_catalog)

    write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type)
    expected_plan.add_action(write_destination_action)
    delete_element_action = RemovePathAction(metadata_file)
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.metadata'), tmp_trestle_dir)

    # Assert the generated plan matches the expected plan
    assert generated_plan == expected_plan
def merge(cls, element_path: ElementPath) -> Plan:
    """Merge operations.

    Builds and returns a Plan that merges the target element back into its
    destination model file and removes the now-redundant split file/directory.

    Raises:
        TrestleError: if the contextual file type cannot be determined or the
            target model cannot be located within the destination model.
    """
    element_path_list = element_path.get_full_path_parts()
    target_model_alias = element_path_list[-1]

    # 1. Load destination model into a stripped model
    destination_model_alias = element_path_list[-2]
    # Destination model filetype
    try:
        file_type = fs.get_contextual_file_type(Path(os.getcwd()))
    except Exception as e:
        raise TrestleError(str(e))
    file_ext = FileContentType.to_file_extension(file_type)
    # Destination model filename
    destination_model_filename = Path(
        f'{utils.classname_to_alias(destination_model_alias, "json")}{file_ext}'
    )
    destination_model_type, _ = fs.get_stripped_contextual_model(destination_model_filename.absolute())
    destination_model_object = destination_model_type.oscal_read(destination_model_filename)

    # 1.5. If target is wildcard, load distributed destination model and replace destination model.
    # Handle WILDCARD '*' match: plan to rewrite the destination with all its distributed attributes
    if target_model_alias == '*':
        merged_model_type, merged_model_alias, merged_model_instance = load_distributed.load_distributed(
            destination_model_filename
        )
        reset_destination_action = CreatePathAction(destination_model_filename.absolute(), clear_content=True)
        write_destination_action = WriteFileAction(
            destination_model_filename, Element(merged_model_instance), content_type=file_type
        )
        delete_target_action = RemovePathAction(Path(merged_model_alias).absolute())
        # fix: previously a throwaway Plan() was created and immediately replaced
        plan: Plan = Plan()
        plan.add_action(reset_destination_action)
        plan.add_action(write_destination_action)
        plan.add_action(delete_target_action)
        return plan

    # Get destination model without the target field stripped
    merged_model_type, merged_model_alias = fs.get_stripped_contextual_model(
        destination_model_filename.absolute(), aliases_not_to_be_stripped=[target_model_alias]
    )

    # 2. Load target model. Target model could be stripped
    try:
        target_model_type = utils.get_target_model(element_path_list, merged_model_type)
    except Exception as e:
        raise TrestleError(
            f'Target model not found. Possibly merge of the elements not allowed at this point. {str(e)}'
        )
    # target_model filename - depends whether destination model is decomposed or not
    if (Path(os.getcwd()) / destination_model_alias).exists():
        target_model_path = f'{os.getcwd()}/{destination_model_alias}/{target_model_alias}'
    else:
        target_model_path = target_model_alias

    # if target model is a file then handle file. If file doesn't exist, handle the directory,
    # but in this case it's a list or a dict collection type
    if (Path(f'{target_model_path}{file_ext}')).exists():
        target_model_filename = Path(f'{target_model_path}{file_ext}')
        _, _, target_model_object = load_distributed.load_distributed(target_model_filename)
    else:
        target_model_filename = Path(target_model_path)
        collection_type = utils.get_origin(target_model_type)
        _, _, target_model_object = load_distributed.load_distributed(target_model_filename, collection_type)

    # unwrap pydantic __root__ containers so the raw collection is inserted
    if hasattr(target_model_object, '__dict__') and '__root__' in target_model_object.__dict__:
        target_model_object = target_model_object.__dict__['__root__']

    # 3. Insert target model into destination model.
    merged_dict = destination_model_object.__dict__
    merged_dict[target_model_alias] = target_model_object
    merged_model_object = merged_model_type(**merged_dict)  # type: ignore
    merged_destination_element = Element(merged_model_object)

    # 4. Create action plan
    reset_destination_action = CreatePathAction(destination_model_filename.absolute(), clear_content=True)
    write_destination_action = WriteFileAction(
        destination_model_filename, merged_destination_element, content_type=file_type
    )
    delete_target_action = RemovePathAction(target_model_filename)

    plan: Plan = Plan()
    plan.add_action(reset_destination_action)
    plan.add_action(write_destination_action)
    plan.add_action(delete_target_action)

    # TODO: Destination model directory is empty or already merged? Then clean up.
    return plan