def test_load_distributed(testdata_dir, tmp_trestle_dir):
    """Test massive distributed load, that includes recusive load, list and dict."""
    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_file = mycatalog_dir / 'catalog.json'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    actual_model_type, actual_model_alias, actual_model_instance = load_distributed(
        catalog_file)

    expected_model_type, _ = fs.get_contextual_model_type(
        catalog_file.absolute())

    expected_model_instance = Catalog.oscal_read(
        testdata_dir / 'split_merge/load_distributed/catalog.json')

    assert actual_model_type == expected_model_type
    assert actual_model_alias == 'catalog'
    assert len(
        list(dictdiffer.diff(expected_model_instance,
                             actual_model_instance))) == 0
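
For context, a minimal sketch (not part of the test above) of how the three-value return of load_distributed is typically consumed, assuming the same split catalog layout the test copies in and the same imports:

# Hypothetical usage sketch: reconstruct the whole catalog from its split pieces.
model_type, model_alias, catalog = load_distributed(Path('catalogs/mycatalog/catalog.json'))
assert model_type == Catalog
assert model_alias == 'catalog'
# `catalog` is now a single in-memory Catalog, as if it had never been split.
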
Example #2
    def assemble_model(cls, model_alias: str, object_type: Type[TLO],
                       args: argparse.Namespace) -> int:
        """Assemble a top level OSCAL model within the trestle dist directory."""
        log.set_log_level_from_args(args)
        trestle_root = fs.get_trestle_project_root(Path.cwd())
        if not trestle_root:
            logger.error(
                f'Current working directory {Path.cwd()} is not within a trestle project.'
            )
            return 1
        if trestle_root != Path.cwd():
            logger.error(
                f'Current working directory {Path.cwd()} is not the top level trestle project directory.'
            )
            return 1

        # construct the path to the model file name
        root_model_dir = Path.cwd() / f'{model_alias}s'
        try:
            model_file_type = fs.get_contextual_file_type(root_model_dir /
                                                          args.name)
        except Exception as e:
            logger.error('No files found in the specified model directory.')
            logger.debug(e)
            return 1

        model_file_name = f'{model_alias}{FileContentType.to_file_extension(model_file_type)}'
        root_model_filepath = root_model_dir / args.name / model_file_name

        if not root_model_filepath.exists():
            logger.error(f'No top level model file at {root_model_filepath}')
            return 1

        # distributed load
        _, _, assembled_model = load_distributed(root_model_filepath)
        plural_alias = model_alias if model_alias[-1] == 's' else model_alias + 's'
        assembled_model_dir = trestle_root / const.TRESTLE_DIST_DIR / plural_alias

        assembled_model_filepath = assembled_model_dir / f'{args.name}.{args.extension}'

        plan = Plan()
        plan.add_action(CreatePathAction(assembled_model_filepath, True))
        plan.add_action(
            WriteFileAction(
                assembled_model_filepath, Element(assembled_model),
                FileContentType.to_content_type(f'.{args.extension}')))

        try:
            plan.simulate()
            plan.execute()
            return 0
        except Exception as e:
            logger.error(
                'Unknown error executing trestle assemble operations. Rolling back.'
            )
            logger.debug(e)
            return 1
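
A minimal sketch of calling assemble_model directly rather than through the CLI. The enclosing command class name AssembleCmd and the Catalog import path are assumptions (they are not shown above), and the args namespace is assumed to need only name, extension, and whatever log.set_log_level_from_args reads (verbose here is a guess). The equivalent CLI call, `trestle assemble catalog -n mycatalog -x json`, is exercised by the test in Example #4 below.

import argparse

from trestle.oscal.catalog import Catalog  # import path assumed

# Hypothetical direct invocation of the classmethod shown above.
args = argparse.Namespace(name='mycatalog', extension='json', verbose=0)
rc = AssembleCmd.assemble_model('catalog', Catalog, args)
assert rc == 0  # 0 on success, 1 on any of the error paths above
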
Example #3
def test_merge_everything_into_catalog(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.*' when metadata and catalog is already split."""
    # Assume we are running a command like below
    # trestle merge -e catalog.back-matter
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'
    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    # Change directory to mycatalog_dir
    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}')

    assert catalog_file.exists()

    # Read files

    # Create hand-crafted merge plan
    expected_plan: Plan = Plan()

    reset_destination_action = CreatePathAction(catalog_file.absolute(),
                                                clear_content=True)
    expected_plan.add_action(reset_destination_action)

    _, _, merged_catalog_instance = load_distributed(catalog_file)

    element = Element(merged_catalog_instance)
    write_destination_action = WriteFileAction(catalog_file,
                                               element,
                                               content_type=content_type)
    expected_plan.add_action(write_destination_action)
    delete_element_action = RemovePathAction(Path('catalog').absolute())
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(ElementPath('catalog.*'))

    # Assert the generated plan matches the expected plan
    assert generated_plan == expected_plan
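
A short sketch of what would typically follow in real usage (not part of the test): the generated plan is simulated and then executed, mirroring the simulate/execute pattern in Example #2. This assumes the working directory is still the split mycatalog directory set up above.

# Hypothetical follow-up: apply the merge plan instead of only comparing it.
plan = MergeCmd.merge(ElementPath('catalog.*'))
plan.simulate()  # dry-run every action first
plan.execute()   # rewrite catalog.json and remove the decomposed catalog/ tree
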
Example #4
def test_assemble_catalog(testdata_dir: pathlib.Path, tmp_trestle_dir: pathlib.Path) -> None:
    """Test assembling a catalog."""
    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'
    catalogs_dir = pathlib.Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.rmtree(pathlib.Path('dist'))
    shutil.copytree(test_data_source, catalogs_dir)

    testargs = ['trestle', 'assemble', 'catalog', '-n', 'mycatalog', '-x', 'json']
    with mock.patch.object(sys, 'argv', testargs):
        rc = Trestle().run()
        assert rc == 0

    # Read assembled model
    actual_model = Catalog.oscal_read(pathlib.Path('dist/catalogs/mycatalog.json'))
    _, _, expected_model = load_distributed(mycatalog_dir / 'catalog.json')

    assert actual_model == expected_model
Example #5
    def merge(cls, element_path: ElementPath) -> Plan:
        """Merge operations.

        It returns a plan for the operation
        """
        element_path_list = element_path.get_full_path_parts()
        target_model_alias = element_path_list[-1]
        """1. Load desination model into a stripped model"""
        # Load destination model
        destination_model_alias = element_path_list[-2]
        # Destination model filetype
        try:
            file_type = fs.get_contextual_file_type(Path(os.getcwd()))
        except Exception as e:
            raise TrestleError(str(e))
        file_ext = FileContentType.to_file_extension(file_type)
        # Destination model filename
        destination_model_filename = Path(
            f'{utils.classname_to_alias(destination_model_alias, "json")}{file_ext}'
        )
        destination_model_type, _ = fs.get_stripped_contextual_model(
            destination_model_filename.absolute())

        destination_model_object = destination_model_type.oscal_read(
            destination_model_filename)
        """1.5. If target is wildcard, load distributed destrination model and replace destination model."""
        # Handle WILDCARD '*' match. Return plan to load the destination model, with it's distributed attributes
        if target_model_alias == '*':
            merged_model_type, merged_model_alias, merged_model_instance = load_distributed.load_distributed(
                destination_model_filename)
            reset_destination_action = CreatePathAction(
                destination_model_filename.absolute(), clear_content=True)
            write_destination_action = WriteFileAction(
                destination_model_filename,
                Element(merged_model_instance),
                content_type=file_type)
            delete_target_action = RemovePathAction(
                Path(merged_model_alias).absolute())
            plan: Plan = Plan()
            plan.add_action(reset_destination_action)
            plan.add_action(write_destination_action)
            plan.add_action(delete_target_action)
            return plan

        # Get destination model without the target field stripped
        merged_model_type, merged_model_alias = fs.get_stripped_contextual_model(
            destination_model_filename.absolute(),
            aliases_not_to_be_stripped=[target_model_alias])
        """2. Load Target model. Target model could be stripped"""
        try:
            target_model_type = utils.get_target_model(element_path_list,
                                                       merged_model_type)
        except Exception as e:
            raise TrestleError(
                f'Target model not found. Possibly merge of the elements not allowed at this point. {str(e)}'
            )
        # target_model filename - depends on whether the destination model is decomposed or not
        if (Path(os.getcwd()) / destination_model_alias).exists():
            target_model_path = f'{os.getcwd()}/{destination_model_alias}/{target_model_alias}'
        else:
            target_model_path = target_model_alias

        # If the target model is a file, handle the file. If the file doesn't exist, handle the directory,
        # which in that case holds a list or a dict collection type.
        if (Path(f'{target_model_path}{file_ext}')).exists():
            target_model_filename = Path(f'{target_model_path}{file_ext}')
            _, _, target_model_object = load_distributed.load_distributed(
                target_model_filename)
        else:
            target_model_filename = Path(target_model_path)
            collection_type = utils.get_origin(target_model_type)
            _, _, target_model_object = load_distributed.load_distributed(
                target_model_filename, collection_type)

        if hasattr(target_model_object,
                   '__dict__') and '__root__' in target_model_object.__dict__:
            target_model_object = target_model_object.__dict__['__root__']
        """3. Insert target model into destination model."""
        merged_dict = destination_model_object.__dict__
        merged_dict[target_model_alias] = target_model_object
        merged_model_object = merged_model_type(**merged_dict)  # type: ignore
        merged_destination_element = Element(merged_model_object)
        """4. Create action  plan"""
        reset_destination_action = CreatePathAction(
            destination_model_filename.absolute(), clear_content=True)
        write_destination_action = WriteFileAction(destination_model_filename,
                                                   merged_destination_element,
                                                   content_type=file_type)
        delete_target_action = RemovePathAction(target_model_filename)

        plan: Plan = Plan()
        plan.add_action(reset_destination_action)
        plan.add_action(write_destination_action)
        plan.add_action(delete_target_action)

        # TODO: Destination model directory is empty or already merged? Then clean up.

        return plan
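
A minimal usage sketch for a non-wildcard merge, assuming the enclosing class is MergeCmd (as it is called in Example #3) and that the current working directory is a decomposed catalog directory containing a split back-matter part:

# Hypothetical call: merge a single split element back into catalog.json.
plan = MergeCmd.merge(ElementPath('catalog.back-matter'))
plan.simulate()  # verify every action succeeds before touching the files
plan.execute()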