def test_split_relative_path(tmp_path, keep_cwd: pathlib.Path, simplified_nist_catalog: oscatalog.Catalog, monkeypatch: MonkeyPatch) -> None: """Test split with relative path.""" # prepare trestle project dir with the file cat_name = 'mycat' trestle_root = test_utils.create_trestle_project_with_model( tmp_path, simplified_nist_catalog, cat_name, monkeypatch) orig_model: oscatalog.Catalog = simplified_nist_catalog os.chdir(trestle_root) catalog_dir = trestle_root / 'catalogs' / cat_name catalog_file: pathlib.Path = catalog_dir / 'catalog.json' args = argparse.Namespace(file='catalogs/mycat/catalog.json', element='catalog.metadata', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 # merge receives an element path not a file path # so need to chdir to where the file is os.chdir(catalog_dir) args = argparse.Namespace(element='catalog.*', verbose=1, trestle_root=trestle_root) assert MergeCmd()._run(args) == 0 new_model: oscatalog.Catalog = oscatalog.Catalog.oscal_read(catalog_file) assert test_utils.models_are_equivalent(orig_model, new_model)
def test_merge_everything_into_catalog_with_hidden_files_in_folders( testdata_dir, tmp_trestle_dir): """Test trestle merge -e 'catalog.*' when metadata and catalog are split and hidden files are present.""" # Assume we are running a command like below # trestle merge -e catalog.* content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) # Change directory to mycatalog_dir os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() assert catalog_file.exists() # Read files # Create hand-crafter merge plan expected_plan: Plan = Plan() reset_destination_action = CreatePathAction(catalog_file, clear_content=True) expected_plan.add_action(reset_destination_action) _, _, merged_catalog_instance = ModelUtils.load_distributed( catalog_file, tmp_trestle_dir) element = Element(merged_catalog_instance) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) expected_plan.add_action(write_destination_action) delete_element_action = RemovePathAction(Path('catalog').resolve()) expected_plan.add_action(delete_element_action) test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/.DS_Store') test_utils.make_hidden_file( tmp_trestle_dir / 'catalogs/mycatalog/catalog/metadata/.DS_Store') test_utils.make_hidden_file(tmp_trestle_dir / 'catalogs/mycatalog/catalog/groups/.DS_Store') # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.*'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' assert generated_plan == expected_plan
def test_split_deep(tmp_path, keep_cwd: pathlib.Path, simplified_nist_catalog: oscatalog.Catalog, monkeypatch: MonkeyPatch) -> None: """Test deep split of model.""" # prepare trestle project dir with the file cat_name = 'mycat' trestle_root = test_utils.create_trestle_project_with_model( tmp_path, simplified_nist_catalog, cat_name, monkeypatch) orig_model: oscatalog.Catalog = simplified_nist_catalog catalog_dir = trestle_root / 'catalogs' / cat_name catalog_file: pathlib.Path = catalog_dir / 'catalog.json' os.chdir(catalog_dir) args = argparse.Namespace(file='catalog.json', element='catalog.groups.*.controls.*.controls.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 args = argparse.Namespace(element='catalog.*', verbose=1, trestle_root=trestle_root) assert MergeCmd()._run(args) == 0 new_model: oscatalog.Catalog = oscatalog.Catalog.oscal_read(catalog_file) assert test_utils.models_are_equivalent(orig_model, new_model)
def test_split_tutorial_workflow(tmp_path, keep_cwd: pathlib.Path, simplified_nist_catalog: oscatalog.Catalog, monkeypatch: MonkeyPatch) -> None: """Test split operations and final re-merge in workflow tutorial.""" # prepare trestle project dir with the file cat_name = 'mycat' trestle_root = test_utils.create_trestle_project_with_model( tmp_path, simplified_nist_catalog, cat_name, monkeypatch) catalog_dir = trestle_root / 'catalogs' / cat_name catalog_file: pathlib.Path = catalog_dir / 'catalog.json' orig_model = oscatalog.Catalog.oscal_read(catalog_file) # step0 os.chdir(catalog_dir) args = argparse.Namespace( file='catalog.json', element='catalog.metadata,catalog.groups,catalog.back-matter', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 # step1 os.chdir('catalog') args = argparse.Namespace(file='metadata.json', element='metadata.roles,metadata.parties', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 # step2 os.chdir('metadata') args = argparse.Namespace(file='roles.json', element='roles.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 args = argparse.Namespace(file='parties.json', element='parties.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 # step3 os.chdir('..') args = argparse.Namespace(file='./groups.json', element='groups.*.controls.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 # step4 os.chdir(catalog_dir) args = argparse.Namespace(element='catalog.*', verbose=1, trestle_root=trestle_root) assert MergeCmd()._run(args) == 0 new_model = oscatalog.Catalog.oscal_read(catalog_file) assert test_utils.models_are_equivalent(orig_model, new_model)
def test_bad_merge(testdata_dir, tmp_trestle_dir): """Test a bad merge element path.""" # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) cmd = MergeCmd() args = argparse.Namespace(verbose=1, element='catalog.roles') assert cmd._run(args) == 1
def test_split_merge(testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path) -> None: """Test merging data that has been split using the split command- to ensure symmetry.""" # trestle split -f catalog.json -e catalog.groups.*.controls.* # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step0-merged_catalog/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) catalog_file = Path('catalog.json') # Read and store the catalog before split stripped_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) pre_split_catalog = stripped_catalog_type.oscal_read(catalog_file) assert 'groups' in pre_split_catalog.__fields__.keys() # Split the catalog args = argparse.Namespace(name='split', file='catalog.json', verbose=1, element='catalog.groups.*.controls.*', trestle_root=tmp_trestle_dir) split = SplitCmd()._run(args) assert split == 0 interim_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) interim_catalog = interim_catalog_type.oscal_read(catalog_file.resolve()) assert 'groups' not in interim_catalog.__fields__.keys() # Merge everything back into the catalog # Equivalent to trestle merge -e catalog.* args = argparse.Namespace(name='merge', element='catalog.*', verbose=2, trestle_root=tmp_trestle_dir) rc = MergeCmd()._run(args) assert rc == 0 # Check both the catalogs are the same. post_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) post_merge_catalog = post_catalog_type.oscal_read(catalog_file) assert post_merge_catalog == pre_split_catalog
def test_no_file_given(tmp_path, keep_cwd: pathlib.Path, simplified_nist_catalog: oscatalog.Catalog, monkeypatch: MonkeyPatch) -> None: """Test split with no file specified.""" # prepare trestle project dir with the file cat_name = 'mycat' trestle_root = test_utils.create_trestle_project_with_model( tmp_path, simplified_nist_catalog, cat_name, monkeypatch) orig_model: oscatalog.Catalog = simplified_nist_catalog catalog_dir = trestle_root / 'catalogs' / cat_name catalog_file: pathlib.Path = catalog_dir / 'catalog.json' # no file given and cwd not in trestle directory should fail os.chdir(tmp_path) args = argparse.Namespace(file=None, element='catalog.groups', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 1 os.chdir(catalog_dir) args = argparse.Namespace(file=None, element='catalog.groups,catalog.metadata', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 assert (catalog_dir / 'catalog/groups.json').exists() assert (catalog_dir / 'catalog/metadata.json').exists() os.chdir('./catalog') args = argparse.Namespace(file=None, element='groups.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 assert (catalog_dir / 'catalog/groups/00000__group.json').exists() os.chdir('./groups') args = argparse.Namespace(file='00000__group.json', element='group.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 os.chdir(catalog_dir) args = argparse.Namespace(file=None, element='catalog.*', verbose=1, trestle_root=trestle_root) assert MergeCmd()._run(args) == 0 new_model: oscatalog.Catalog = oscatalog.Catalog.oscal_read(catalog_file) assert test_utils.models_are_equivalent(orig_model, new_model)
def test_merge_invalid_element_path(testdata_dir, tmp_trestle_dir): """Test to make sure each element in -e contains 2 parts at least, and no chained element paths.""" cmd = MergeCmd() args = argparse.Namespace(verbose=1, element='catalog', trestle_root=tmp_trestle_dir) assert cmd._run(args) == 1 args = argparse.Namespace(verbose=1, element='catalog.metadata', trestle_root=tmp_trestle_dir) test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) assert cmd._run(args) == 0
def test_split_comp_def( mode, tmp_path, keep_cwd: pathlib.Path, sample_component_definition: component.ComponentDefinition, monkeypatch: MonkeyPatch) -> None: """Test splitting of component definition and its dictionary.""" compdef_name = 'mycomp' trestle_root = test_utils.create_trestle_project_with_model( tmp_path, sample_component_definition, compdef_name, monkeypatch) compdef_dir = trestle_root / 'component-definitions' / compdef_name compdef_file: pathlib.Path = compdef_dir / 'component-definition.json' original_model = sample_component_definition os.chdir(compdef_dir) # do the split in different ways - then re-merge if mode == 'normal_split.*': args = argparse.Namespace(file='component-definition.json', element='component-definition.components.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 elif mode == 'split_two_steps': args = argparse.Namespace(file='component-definition.json', element='component-definition.components', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 os.chdir('component-definition') args = argparse.Namespace(file='components.json', element='components.*', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 elif mode == 'split_in_lower_dir': args = argparse.Namespace( file='component-definition.json', element='component-definition.components.*.props', verbose=1, trestle_root=trestle_root) assert SplitCmd()._run(args) == 0 os.chdir(compdef_dir) args = argparse.Namespace(element='component-definition.*', verbose=1, trestle_root=trestle_root) assert MergeCmd()._run(args) == 0 new_model = component.ComponentDefinition.oscal_read(compdef_file) assert test_utils.models_are_equivalent(new_model, original_model)
def test_merge_plan_simple_case(testdata_dir, tmp_trestle_dir): """Test '$mycatalog$ trestle merge -e catalog.back-matter'.""" # Assume we are running a command like below # trestle merge -e catalog.back-matter content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' catalog_dir = mycatalog_dir / 'catalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() catalog_dir = Path('catalog/') back_matter_file = (catalog_dir / f'back-matter{fext}').resolve() assert catalog_file.exists() assert back_matter_file.exists() # Read files # The destination file/model needs to be loaded in a stripped model stripped_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir) stripped_catalog = stripped_catalog_type.oscal_read(catalog_file) # Back-matter model needs to be complete and if it is decomposed, needs to be merged recursively first back_matter = common.BackMatter.oscal_read(back_matter_file) # Back-matter needs to be inserted in a stripped Catalog that does NOT exclude the back-matter fields merged_catalog_type, merged_catalog_alias = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir, aliases_not_to_be_stripped=['back-matter']) merged_dict = stripped_catalog.__dict__ merged_dict['back-matter'] = back_matter merged_catalog = merged_catalog_type(**merged_dict) element = Element(merged_catalog, merged_catalog_alias) # Create hand-crafter merge plan reset_destination_action = CreatePathAction(catalog_file, clear_content=True) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) delete_element_action = RemovePathAction(back_matter_file) expected_plan: Plan = Plan() expected_plan.add_action(reset_destination_action) expected_plan.add_action(write_destination_action) expected_plan.add_action(delete_element_action) # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.back-matter'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' assert generated_plan == expected_plan
def test_split_merge_out_of_context(testdata_dir, tmp_trestle_dir, rel_context_dir: str, use_absolutes: bool, split_elem: str, merge_elem: str, use_effective_cwd: bool): """Test merging data that has been split using the split command- to ensure symmetry.""" # trestle split -f catalog.json -e catalog.groups.*.controls.* # prepare trestle project dir with the file - could e cleaned up. test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step0-merged_catalog/catalogs/' # Pontentially change to NIST DIR catalogs_dir = Path('catalogs/') shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) full_path_to_model_dir = tmp_trestle_dir / 'catalogs' / 'mycatalog' full_path_to_model = full_path_to_model_dir / 'catalog.json' full_context_dir = tmp_trestle_dir / rel_context_dir if use_absolutes: model_file = full_path_to_model else: model_file = full_path_to_model.relative_to(full_context_dir) # Always use full context dir for safety os.chdir(full_context_dir) # Read and store the catalog before split stripped_catalog_type, _ = ModelUtils.get_stripped_model_type( full_path_to_model.resolve(), tmp_trestle_dir) pre_split_catalog = stripped_catalog_type.oscal_read(full_path_to_model) assert 'groups' in pre_split_catalog.__fields__.keys() # Split the catalog args = argparse.Namespace(name='split', file=model_file, verbose=0, element=split_elem, trestle_root=tmp_trestle_dir) split = SplitCmd()._run(args) assert split == 0 interim_catalog_type, _ = ModelUtils.get_stripped_model_type( full_path_to_model.resolve(), tmp_trestle_dir) interim_catalog = interim_catalog_type.oscal_read( full_path_to_model.resolve()) assert 'groups' not in interim_catalog.__fields__.keys() # Merge everything back into the catalog # Equivalent to trestle merge -e catalog.* if use_effective_cwd: plan = MergeCmd.merge(full_path_to_model_dir, ElementPath(merge_elem), trestle_root=tmp_trestle_dir) else: os.chdir(full_path_to_model_dir) plan = MergeCmd.merge(pathlib.Path.cwd(), ElementPath(merge_elem), trestle_root=tmp_trestle_dir) plan.execute() # Check both the catalogs are the same. post_catalog_type, _ = ModelUtils.get_stripped_model_type( full_path_to_model.resolve(), tmp_trestle_dir) post_merge_catalog = post_catalog_type.oscal_read(full_path_to_model) assert post_merge_catalog == pre_split_catalog
def test_merge_expanded_metadata_into_catalog(testdata_dir, tmp_trestle_dir): """Test '$mycatalog$ trestle merge -e catalog.metadata' when metadata is already split.""" # Assume we are running a command like below # trestle merge -e catalog.back-matter content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' catalog_dir = mycatalog_dir / 'catalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) # Change directory to mycatalog_dir os.chdir(mycatalog_dir) catalog_file = Path(f'catalog{fext}').resolve() catalog_dir = Path('catalog/') metadata_dir = catalog_dir / 'metadata' metadata_file = (catalog_dir / f'metadata{fext}').resolve() assert catalog_file.exists() assert metadata_dir.exists() assert metadata_file.exists() # Read files # Create hand-crafter merge plan expected_plan: Plan = Plan() reset_destination_action = CreatePathAction(catalog_file, clear_content=True) expected_plan.add_action(reset_destination_action) _, _, merged_metadata_instance = ModelUtils.load_distributed( metadata_file, tmp_trestle_dir) merged_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file.resolve(), tmp_trestle_dir, aliases_not_to_be_stripped=['metadata']) stripped_catalog_type, _ = ModelUtils.get_stripped_model_type( catalog_file, tmp_trestle_dir) stripped_catalog = stripped_catalog_type.oscal_read(catalog_file) merged_catalog_dict = stripped_catalog.__dict__ merged_catalog_dict['metadata'] = merged_metadata_instance merged_catalog = merged_catalog_type(**merged_catalog_dict) element = Element(merged_catalog) write_destination_action = WriteFileAction(catalog_file, element, content_type=content_type) expected_plan.add_action(write_destination_action) delete_element_action = RemovePathAction(metadata_file) expected_plan.add_action(delete_element_action) # Call merge() generated_plan = MergeCmd.merge(Path.cwd(), ElementPath('catalog.metadata'), tmp_trestle_dir) # Assert the generated plan matches the expected plan' assert generated_plan == expected_plan
def test_merge_plan_simple_list(testdata_dir, tmp_trestle_dir): """Test '$mycatalog$ trestle merge -e metadata.roles'.""" # Assume we are running a command like below # trestle merge -e catalog.back-matter content_type = FileContentType.JSON fext = FileContentType.to_file_extension(content_type) # prepare trestle project dir with the file test_utils.ensure_trestle_config_dir(tmp_trestle_dir) test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs' catalogs_dir = Path('catalogs/') mycatalog_dir = catalogs_dir / 'mycatalog' catalog_dir = mycatalog_dir / 'catalog' # Copy files from test/data/split_merge/step4 shutil.rmtree(catalogs_dir) shutil.copytree(test_data_source, catalogs_dir) os.chdir(mycatalog_dir) catalog_dir = Path('catalog/') os.chdir(catalog_dir) metadata_dir = Path('metadata/') metadata_file = Path(f'metadata{fext}') roles_dir = metadata_dir / 'roles' # Read files # The destination file/model needs to be loaded in a stripped model stripped_metadata_type, _ = fs.get_stripped_contextual_model( metadata_file.absolute()) stripped_metadata = stripped_metadata_type.oscal_read(metadata_file) # Back-matter model needs to be complete and if it is decomposed, needs to be merged recursively first roles = [ oscatalog.Role.oscal_read(roles_dir / '00000__role.json'), oscatalog.Role.oscal_read(roles_dir / '00001__role.json') ] # Back-matter needs to be inserted in a stripped Catalog that does NOT exclude the back-matter fields merged_metadata_type, merged_metadata_alias = fs.get_stripped_contextual_model( metadata_file.absolute(), aliases_not_to_be_stripped=['roles']) merged_dict = stripped_metadata.__dict__ merged_dict['roles'] = roles merged_metadata = merged_metadata_type(**merged_dict) element = Element(merged_metadata, merged_metadata_alias) # Create hand-crafter merge plan reset_destination_action = CreatePathAction(metadata_file.absolute(), clear_content=True) write_destination_action = WriteFileAction(metadata_file, element, content_type=content_type) delete_element_action = RemovePathAction(roles_dir.absolute()) expected_plan: Plan = Plan() expected_plan.add_action(reset_destination_action) expected_plan.add_action(write_destination_action) expected_plan.add_action(delete_element_action) # Call merged() generated_plan = MergeCmd.merge(ElementPath('metadata.roles')) # Assert the generated plan matches the expected plan' assert len(list(diff(generated_plan, expected_plan))) == 0