Example #1
def test_split_merge(testdata_dir, tmp_trestle_dir):
    """Test merging data that has been split using the split command- to ensure symmetry."""
    # trestle split -f catalog.json -e catalog.groups.*.controls.*

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step0-merged_catalog/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'

    # Copy files from test/data/split_merge/step0-merged_catalog
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    os.chdir(mycatalog_dir)
    catalog_file = Path('catalog.json')

    # Read and store the catalog before split
    stripped_catalog_type, _ = fs.get_stripped_contextual_model(
        catalog_file.absolute())
    pre_split_catalog = stripped_catalog_type.oscal_read(catalog_file)
    assert 'groups' in pre_split_catalog.__fields__.keys()

    # Split the catalog
    args = argparse.Namespace(name='split',
                              file='catalog.json',
                              verbose=1,
                              element='catalog.groups.*.controls.*')
    split = SplitCmd()._run(args)

    assert split == 0

    interim_catalog_type, _ = fs.get_stripped_contextual_model(
        catalog_file.absolute())
    interim_catalog = interim_catalog_type.oscal_read(catalog_file.absolute())
    assert 'groups' not in interim_catalog.__fields__.keys()

    # Merge everything back into the catalog
    # Equivalent to trestle merge -e catalog.*
    args = argparse.Namespace(name='merge', element='catalog.*', verbose=1)
    MergeCmd()._run(args)

    # Check both the catalogs are the same.
    post_catalog_type, _ = fs.get_stripped_contextual_model(
        catalog_file.absolute())
    post_merge_catalog = post_catalog_type.oscal_read(catalog_file)
    assert post_merge_catalog == pre_split_catalog
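Example #2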
def test_load_list(testdata_dir, tmp_trestle_dir):
    """Test loading a list recursively."""
    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    actual_model_type, actual_model_alias, actual_roles = _load_list(
        catalog_dir / 'metadata' / 'roles')

    expected_roles = [
        Role.oscal_read(catalog_dir / 'metadata/roles/00000__role.json'),
        Role.oscal_read(catalog_dir / 'metadata/roles/00001__role.json')
    ]

    expected_model_type, _ = fs.get_stripped_contextual_model(
        (catalog_dir / 'metadata/roles').absolute())

    assert actual_model_type.__signature__ == expected_model_type.__signature__
    assert actual_model_alias == 'catalog.metadata.roles'
    assert test_utils.list_unordered_equal(actual_roles, expected_roles)
Example #3
def test_striped_model(tmp_path, sample_catalog_minimal):
    """Test _run for AddCmd for stripped model."""
    cwd = os.getcwd()
    content_type = FileContentType.JSON
    catalog_def_dir, catalog_def_file = test_utils.prepare_trestle_project_dir(
        tmp_path, content_type, sample_catalog_minimal,
        test_utils.CATALOGS_DIR)
    os.chdir(catalog_def_dir)
    testargs = [
        'trestle', 'split', '-f', 'catalog.json', '-e', 'catalog.metadata'
    ]
    with patch.object(sys, 'argv', testargs):
        Trestle().run()

    # Now that the metadata has been split, add of catalog.metadata.roles will error,
    # but add of catalog.back-matter will pass

    testargs = [
        'trestle', 'add', '-f', 'catalog.json', '-e', 'catalog.metadata.roles'
    ]

    with patch.object(sys, 'argv', testargs):
        assert Trestle().run() == 1

    testargs = [
        'trestle', 'add', '-f', 'catalog.json', '-e', 'catalog.back-matter'
    ]

    current_model, _ = get_stripped_contextual_model()
    current_catalog = current_model.oscal_read(pathlib.Path('catalog.json'))
    current_catalog.back_matter = BackMatter()
    expected_catalog = current_catalog

    with patch.object(sys, 'argv', testargs):
        Trestle().run()

    actual_model, _ = get_stripped_contextual_model()
    actual_catalog = actual_model.oscal_read(pathlib.Path('catalog.json'))
    assert expected_catalog == actual_catalog

    os.chdir(cwd)
Example #4
def _load_dict(
    filepath: Path
) -> Tuple[Type[OscalBaseModel], str, Dict[str, OscalBaseModel]]:
    """Given path to a directory of additionalProperty(dict) models, load the distributed models."""
    model_dict: Dict[str, OscalBaseModel] = {}
    collection_model_type, collection_model_alias = fs.get_stripped_contextual_model(
        filepath.absolute())
    for path in sorted(Path.iterdir(filepath)):
        model_type, model_alias, model_instance = load_distributed(path)
        field_name = path.parts[-1].split('__')[0]
        model_dict[field_name] = model_instance

    return collection_model_type, collection_model_alias, model_dict
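A minimal usage sketch for _load_dict, assuming the current directory is the model directory (catalogs/mycatalog, as in the tests above); the directory name and keys below are hypothetical, not taken from these examples:

from pathlib import Path

# Hypothetical decomposed dict-style directory whose files follow the
# '<key>__<model>.json' naming convention seen in the split tests above.
parties_type, parties_alias, parties_dict = _load_dict(
    Path('catalog/metadata/responsible-parties'))
# parties_dict maps the filename prefix before '__' (the dict key) to the
# model instance loaded from that file.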
Example #5
    def _run(self, args: argparse.Namespace) -> int:
        """Add an OSCAL component/subcomponent to the specified component.

        This method takes as input a filename and a list of comma-separated element paths. Element paths are field aliases.
        The method first finds the parent model from the file and loads the file into the model.
        Then the method executes 'add' for each of the element paths specified.
        """
        log.set_log_level_from_args(args)
        try:
            args_dict = args.__dict__

            file_path = pathlib.Path(args_dict[const.ARG_FILE])

            # Get parent model and then load json into parent model
            parent_model, parent_alias = fs.get_stripped_contextual_model(
                file_path.absolute())
            parent_object = parent_model.oscal_read(file_path.absolute())
            # FIXME : handle YAML files after detecting file type
            parent_element = Element(
                parent_object,
                utils.classname_to_alias(parent_model.__name__, 'json'))

            add_plan = Plan()

            # Do _add for each element_path specified in args
            element_paths: List[str] = args_dict[const.ARG_ELEMENT].split(',')
            for elm_path_str in element_paths:
                element_path = ElementPath(elm_path_str)
                update_action, parent_element = self.add(
                    element_path, parent_model, parent_element)
                add_plan.add_action(update_action)

            create_action = CreatePathAction(file_path.absolute(), True)
            write_action = WriteFileAction(
                file_path.absolute(), parent_element,
                FileContentType.to_content_type(file_path.suffix))

            add_plan.add_action(create_action)
            add_plan.add_action(write_action)

            add_plan.simulate()
            add_plan.execute()

        except BaseException as err:
            logger.error(f'Add failed: {err}')
            return 1
        return 0
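A minimal sketch of driving this _run method programmatically, mirroring the argparse.Namespace pattern used for SplitCmd in Example #1; the AddCmd import path is an assumption, and the equivalent CLI form ('trestle add -f catalog.json -e catalog.back-matter') appears in Example #3:

import argparse

# from trestle.core.commands.add import AddCmd  # assumed import path

args = argparse.Namespace(name='add',
                          file='catalog.json',
                          element='catalog.back-matter',
                          verbose=1)
rc = AddCmd()._run(args)  # 0 on success, 1 on failure (see the except branch above)
assert rc == 0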
Example #6
    def _run(self, args: argparse.Namespace) -> int:
        """Split an OSCAL file into elements."""
        logger.debug('Entering trestle split.')
        log.set_log_level_from_args(args)
        # get the Model
        args_raw = args.__dict__
        if args_raw[const.ARG_FILE] is None:
            logger.error(f'Argument "-{const.ARG_FILE_SHORT}" is required')
            return 1

        file_path = pathlib.Path(args_raw[const.ARG_FILE])
        if not file_path.exists():
            logger.error(f'File {file_path} does not exist.')
            return 1
        content_type = FileContentType.to_content_type(file_path.suffix)

        # find the base directory of the file
        file_absolute_path = pathlib.Path(file_path.absolute())
        base_dir = file_absolute_path.parent

        model_type, _ = fs.get_stripped_contextual_model(file_absolute_path)

        # FIXME: Handle list/dicts
        model: OscalBaseModel = model_type.oscal_read(file_path)

        element_paths: List[ElementPath] = cmd_utils.parse_element_args(
            args_raw[const.ARG_ELEMENT].split(','))

        split_plan = self.split_model(model,
                                      element_paths,
                                      base_dir,
                                      content_type,
                                      root_file_name=args_raw[const.ARG_FILE])

        # Simulate the plan
        # if it fails, it would throw errors and get out of this command
        split_plan.simulate()

        # If we are here then simulation passed
        # so move the original file to the trash
        trash.store(file_path, True)

        # execute the plan
        split_plan.execute()
        return 0
Example #7
def _load_list(
        filepath: Path
) -> Tuple[Type[OscalBaseModel], str, List[OscalBaseModel]]:
    """Given path to a directory of list(array) models, load the distributed models."""
    aliases_not_to_be_stripped = []
    instances_to_be_merged: List[OscalBaseModel] = []
    # TODO: FIXME: fs.get_stripped_contextual_model fails without absolute file path!!! FIX IT!!
    collection_model_type, collection_model_alias = fs.get_stripped_contextual_model(
        filepath.absolute())

    for path in sorted(Path.iterdir(filepath)):

        # ASSUMPTION HERE: if it is a directory, there's a file that cannot be decomposed further.
        if path.is_dir():
            continue
        model_type, model_alias, model_instance = load_distributed(path)

        instances_to_be_merged.append(model_instance)
        aliases_not_to_be_stripped.append(model_alias.split('.')[-1])

    return collection_model_type, collection_model_alias, instances_to_be_merged
Example #8
def test_get_stripped_contextual_model(tmp_dir):
    """Test get stripped model type and alias based on filesystem context."""
    with pytest.raises(TrestleError):
        fs.get_stripped_contextual_model(tmp_dir / 'invalidpath') is None

    with pytest.raises(TrestleError):
        fs.get_stripped_contextual_model(tmp_dir) is None

    create_sample_catalog_project(tmp_dir)

    catalogs_dir = tmp_dir / 'catalogs'
    with pytest.raises(TrestleError):
        assert fs.get_stripped_contextual_model(catalogs_dir) is None

    def check_stripped_catalog():
        assert 'uuid' in alias_to_field_map
        assert 'metadata' not in alias_to_field_map
        assert 'back-matter' not in alias_to_field_map
        assert 'groups' not in alias_to_field_map

    mycatalog_dir = catalogs_dir / 'mycatalog'
    stripped_catalog = fs.get_stripped_contextual_model(mycatalog_dir)
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_catalog()

    stripped_catalog = fs.get_stripped_contextual_model(mycatalog_dir /
                                                        'catalog.json')
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_catalog()

    def check_stripped_metadata():
        assert 'title' in alias_to_field_map
        assert 'published' in alias_to_field_map
        assert 'last-modified' in alias_to_field_map
        assert 'version' in alias_to_field_map
        assert 'oscal-version' in alias_to_field_map
        assert 'revision-history' in alias_to_field_map
        assert 'document-ids' in alias_to_field_map
        assert 'links' in alias_to_field_map
        assert 'locations' in alias_to_field_map
        assert 'parties' in alias_to_field_map
        assert 'remarks' in alias_to_field_map
        assert 'roles' not in alias_to_field_map
        assert 'responsible-properties' not in alias_to_field_map
        assert 'properties' not in alias_to_field_map

    catalog_dir = mycatalog_dir / 'catalog'
    metadata_dir = catalog_dir / 'metadata'
    stripped_catalog = fs.get_stripped_contextual_model(metadata_dir)
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_metadata()

    stripped_catalog = fs.get_stripped_contextual_model(catalog_dir /
                                                        'metadata.json')
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_metadata()

    groups_dir = catalog_dir / 'groups'
    stripped_catalog = fs.get_stripped_contextual_model(groups_dir)

    assert stripped_catalog[0].__name__ == 'Groups'
    assert stripped_catalog[1] == 'catalog.groups'

    def check_stripped_group():
        assert 'id' in alias_to_field_map
        assert 'class' in alias_to_field_map
        assert 'title' in alias_to_field_map
        assert 'parameters' in alias_to_field_map
        assert 'properties' in alias_to_field_map
        assert 'annotations' in alias_to_field_map
        assert 'links' in alias_to_field_map
        assert 'parts' in alias_to_field_map
        assert 'groups' in alias_to_field_map
        assert 'controls' not in alias_to_field_map

    stripped_catalog = fs.get_stripped_contextual_model(groups_dir /
                                                        f'00000{IDX_SEP}group')
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_group()

    stripped_catalog = fs.get_stripped_contextual_model(
        groups_dir / f'00000{IDX_SEP}group.json')
    alias_to_field_map = stripped_catalog[0].alias_to_field_map()
    check_stripped_group()
Example #9
def test_merge_plan_simple_case(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.back-matter'."""
    # Assume we are running a command like below
    # trestle merge -e catalog.back-matter
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'

    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}')
    catalog_dir = Path('catalog/')
    back_matter_file = catalog_dir / f'back-matter{fext}'

    assert catalog_file.exists()
    assert back_matter_file.exists()

    # Read files

    # The destination file/model needs to be loaded in a stripped model
    stripped_catalog_type, _ = fs.get_stripped_contextual_model(
        catalog_file.absolute())
    stripped_catalog = stripped_catalog_type.oscal_read(catalog_file)

    # Back-matter model needs to be complete and if it is decomposed, needs to be merged recursively first
    back_matter = oscatalog.BackMatter.oscal_read(back_matter_file)

    # Back-matter needs to be inserted in a stripped Catalog that does NOT exclude the back-matter fields

    merged_catalog_type, merged_catalog_alias = fs.get_stripped_contextual_model(
        catalog_file.absolute(), aliases_not_to_be_stripped=['back-matter'])
    merged_dict = stripped_catalog.__dict__
    merged_dict['back-matter'] = back_matter
    merged_catalog = merged_catalog_type(**merged_dict)

    element = Element(merged_catalog, merged_catalog_alias)

    # Create hand-crafted merge plan
    reset_destination_action = CreatePathAction(catalog_file.absolute(),
                                                clear_content=True)
    write_destination_action = WriteFileAction(catalog_file,
                                               element,
                                               content_type=content_type)
    delete_element_action = RemovePathAction(back_matter_file.absolute())

    expected_plan: Plan = Plan()
    expected_plan.add_action(reset_destination_action)
    expected_plan.add_action(write_destination_action)
    expected_plan.add_action(delete_element_action)

    # Call merge()

    generated_plan = MergeCmd.merge(ElementPath('catalog.back-matter'))

    # Assert the generated plan matches the expected plan
    assert generated_plan == expected_plan
Example #10
def test_merge_expanded_metadata_into_catalog(testdata_dir, tmp_trestle_dir):
    """Test '$mycatalog$ trestle merge -e catalog.metadata' when metadata is already split."""
    # Assume we are running a command like below
    # trestle merge -e catalog.metadata
    content_type = FileContentType.JSON
    fext = FileContentType.to_file_extension(content_type)

    # prepare trestle project dir with the file
    test_utils.ensure_trestle_config_dir(tmp_trestle_dir)

    test_data_source = testdata_dir / 'split_merge/step4_split_groups_array/catalogs'
    catalogs_dir = Path('catalogs/')
    mycatalog_dir = catalogs_dir / 'mycatalog'
    catalog_dir = mycatalog_dir / 'catalog'

    # Copy files from test/data/split_merge/step4
    shutil.rmtree(catalogs_dir)
    shutil.copytree(test_data_source, catalogs_dir)

    # Change directory to mycatalog_dir
    os.chdir(mycatalog_dir)
    catalog_file = Path(f'catalog{fext}')
    catalog_dir = Path('catalog/')
    metadata_dir = catalog_dir / 'metadata'
    metadata_file = catalog_dir / f'metadata{fext}'

    assert catalog_file.exists()
    assert metadata_dir.exists()
    assert metadata_file.exists()

    # Read files

    # Create hand-crafted merge plan
    expected_plan: Plan = Plan()

    reset_destination_action = CreatePathAction(catalog_file.absolute(),
                                                clear_content=True)
    expected_plan.add_action(reset_destination_action)

    _, _, merged_metadata_instance = load_distributed(metadata_file)
    merged_catalog_type, merged_catalog_alias = fs.get_stripped_contextual_model(
        catalog_file.absolute(), aliases_not_to_be_stripped=['metadata'])
    stripped_catalog_type, _ = fs.get_stripped_contextual_model(
        catalog_file.absolute())
    stripped_catalog = stripped_catalog_type.oscal_read(catalog_file)
    merged_catalog_dict = stripped_catalog.__dict__
    merged_catalog_dict['metadata'] = merged_metadata_instance
    merged_catalog = merged_catalog_type(**merged_catalog_dict)
    element = Element(merged_catalog)
    write_destination_action = WriteFileAction(catalog_file,
                                               element,
                                               content_type=content_type)
    expected_plan.add_action(write_destination_action)
    delete_element_action = RemovePathAction(metadata_file.absolute())
    expected_plan.add_action(delete_element_action)

    # Call merge()
    generated_plan = MergeCmd.merge(ElementPath('catalog.metadata'))

    # Assert the generated plan matches the expected plan
    assert generated_plan == expected_plan
Example #11
def load_distributed(
    file_path: Path,
    collection_type: Optional[Type[Any]] = None
) -> Tuple[Type[OscalBaseModel], str, Union[
        OscalBaseModel, List[OscalBaseModel], Dict[str, OscalBaseModel]]]:
    """
    Given path to a model, load the model.

    If the model is decomposed/split/distributed, the decomposed models are loaded recursively.

    Args:
        file_path (pathlib.Path): The path to the file/directory to be loaded.
        collection_type (Type[Any], optional): The type of collection model, if it is a collection model.
            typing.List if the model is a list, typing.Dict if the model is additionalProperty.
            Defaults to None.

    Returns:
        Tuple[Type[OscalBaseModel], str, Union[OscalBaseModel, List[OscalBaseModel], Dict[str, OscalBaseModel]]]: Return
            a tuple of Model Type (e.g. class 'trestle.oscal.catalog.Catalog'), Model Alias (e.g. 'catalog.metadata'),
            and Instance of the Model. If the model is decomposed/split/distributed, the instance of the model contains
            the decomposed models loaded recursively.
    """
    # If the path contains a list type model
    if collection_type is list:
        return _load_list(file_path)

    # If the path contains a dict type model
    if collection_type is dict:
        return _load_dict(file_path)

    # Get current model
    primary_model_type, primary_model_alias = fs.get_stripped_contextual_model(
        file_path.absolute())
    primary_model_instance = primary_model_type.oscal_read(file_path)
    primary_model_dict = primary_model_instance.__dict__

    # Is model decomposed?
    file_dir = file_path.parent
    decomposed_dir = file_dir / file_path.parts[-1].split('.')[0]

    if decomposed_dir.exists():
        aliases_not_to_be_stripped = []
        instances_to_be_merged: List[OscalBaseModel] = []

        for path in sorted(Path.iterdir(decomposed_dir)):

            if path.is_file():
                model_type, model_alias, model_instance = load_distributed(
                    path)
                aliases_not_to_be_stripped.append(model_alias.split('.')[-1])
                instances_to_be_merged.append(model_instance)

            elif path.is_dir():
                model_type, model_alias = fs.get_stripped_contextual_model(
                    path.absolute())
                # Only load the directory if it is a collection model. Otherwise do nothing - it gets loaded when
                # iterating over the model file
                if '__root__' in model_type.__fields__.keys(
                ) and utils.is_collection_field_type(
                        model_type.__fields__['__root__'].outer_type_):
                    # This directory is a decomposed List or Dict
                    collection_type = utils.get_origin(
                        model_type.__fields__['__root__'].outer_type_)
                    model_type, model_alias, model_instance = load_distributed(
                        path, collection_type)
                    aliases_not_to_be_stripped.append(
                        model_alias.split('.')[-1])
                    instances_to_be_merged.append(model_instance)

        for i in range(len(aliases_not_to_be_stripped)):
            alias = aliases_not_to_be_stripped[i]
            instance = instances_to_be_merged[i]
            if hasattr(instance, '__dict__'
                       ) and '__root__' in instance.__dict__ and isinstance(
                           instance, OscalBaseModel):
                instance = instance.__dict__['__root__']
            primary_model_dict[alias] = instance

        merged_model_type, merged_model_alias = fs.get_stripped_contextual_model(
            file_path.absolute(), aliases_not_to_be_stripped)
        merged_model_instance = merged_model_type(
            **primary_model_dict)  # type: ignore
        return merged_model_type, merged_model_alias, merged_model_instance

    else:
        return primary_model_type, primary_model_alias, primary_model_instance
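A minimal usage sketch, assuming the current directory is the model directory (catalogs/mycatalog) with the decomposed layout used in the split/merge tests above:

from pathlib import Path

# Re-assemble the whole (possibly decomposed) catalog into a single instance.
catalog_type, catalog_alias, catalog = load_distributed(Path('catalog.json'))

# Load a decomposed list-type directory; collection_type=list routes the call
# through _load_list (see Example #7).
roles_type, roles_alias, roles = load_distributed(
    Path('catalog/metadata/roles'), collection_type=list)
# roles_alias is 'catalog.metadata.roles' and roles is the list of Role models,
# as in test_load_list above.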
Example #12
    def merge(cls, element_path: ElementPath) -> Plan:
        """Merge operations.

        It returns a plan for the operation
        """
        element_path_list = element_path.get_full_path_parts()
        target_model_alias = element_path_list[-1]
        """1. Load desination model into a stripped model"""
        # Load destination model
        destination_model_alias = element_path_list[-2]
        # Destination model filetype
        try:
            file_type = fs.get_contextual_file_type(Path(os.getcwd()))
        except Exception as e:
            raise TrestleError(str(e))
        file_ext = FileContentType.to_file_extension(file_type)
        # Destination model filename
        destination_model_filename = Path(
            f'{utils.classname_to_alias(destination_model_alias, "json")}{file_ext}'
        )
        destination_model_type, _ = fs.get_stripped_contextual_model(
            destination_model_filename.absolute())

        destination_model_object = destination_model_type.oscal_read(
            destination_model_filename)
        """1.5. If target is wildcard, load distributed destrination model and replace destination model."""
        # Handle WILDCARD '*' match. Return plan to load the destination model, with it's distributed attributes
        if target_model_alias == '*':
            merged_model_type, merged_model_alias, merged_model_instance = load_distributed.load_distributed(
                destination_model_filename)
            reset_destination_action = CreatePathAction(
                destination_model_filename.absolute(), clear_content=True)
            write_destination_action = WriteFileAction(
                destination_model_filename,
                Element(merged_model_instance),
                content_type=file_type)
            delete_target_action = RemovePathAction(
                Path(merged_model_alias).absolute())
            plan: Plan = Plan()
            plan.add_action(reset_destination_action)
            plan.add_action(write_destination_action)
            plan.add_action(delete_target_action)
            return plan

        # Get destination model without the target field stripped
        merged_model_type, merged_model_alias = fs.get_stripped_contextual_model(
            destination_model_filename.absolute(),
            aliases_not_to_be_stripped=[target_model_alias])
        """2. Load Target model. Target model could be stripped"""
        try:
            target_model_type = utils.get_target_model(element_path_list,
                                                       merged_model_type)
        except Exception as e:
            raise TrestleError(
                f'Target model not found. Possibly merge of the elements not allowed at this point. {str(e)}'
            )
        # target_model filename - depends on whether the destination model is decomposed or not
        if (Path(os.getcwd()) / destination_model_alias).exists():
            target_model_path = f'{os.getcwd()}/{destination_model_alias}/{target_model_alias}'
        else:
            target_model_path = target_model_alias

        # if target model is a file then handle file. If file doesn't exist, handle the directory,
        # but in this case it's a list or a dict collection type
        if (Path(f'{target_model_path}{file_ext}')).exists():
            target_model_filename = Path(f'{target_model_path}{file_ext}')
            _, _, target_model_object = load_distributed.load_distributed(
                target_model_filename)
        else:
            target_model_filename = Path(target_model_path)
            collection_type = utils.get_origin(target_model_type)
            _, _, target_model_object = load_distributed.load_distributed(
                target_model_filename, collection_type)

        if hasattr(target_model_object,
                   '__dict__') and '__root__' in target_model_object.__dict__:
            target_model_object = target_model_object.__dict__['__root__']
        """3. Insert target model into destination model."""
        merged_dict = destination_model_object.__dict__
        merged_dict[target_model_alias] = target_model_object
        merged_model_object = merged_model_type(**merged_dict)  # type: ignore
        merged_destination_element = Element(merged_model_object)
        """4. Create action  plan"""
        reset_destination_action = CreatePathAction(
            destination_model_filename.absolute(), clear_content=True)
        write_destination_action = WriteFileAction(destination_model_filename,
                                                   merged_destination_element,
                                                   content_type=file_type)
        delete_target_action = RemovePathAction(target_model_filename)

        plan: Plan = Plan()
        plan.add_action(reset_destination_action)
        plan.add_action(write_destination_action)
        plan.add_action(delete_target_action)

        # TODO: Destination model directory is empty or already merged? Then clean up.

        return plan
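The returned Plan is applied with the same simulate/execute pattern that SplitCmd._run uses above; a minimal sketch, assuming the current directory is the destination model directory (e.g. catalogs/mycatalog) and that MergeCmd and ElementPath are imported as in the tests above:

merge_plan = MergeCmd.merge(ElementPath('catalog.back-matter'))
merge_plan.simulate()  # dry run; raises on failure, as in SplitCmd._run
merge_plan.execute()   # apply the create/write/remove actions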