Exemplo n.º 1
0
    def test_only_appropriate_launcher_is_filtered_by_user_input_devices(self):
        """Launchers whose device is not in --target_devices are dropped; exact matches stay."""
        cpu_launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path1',
            'weights': '/absolute_path1',
            'adapter': 'classification',
            'device': 'CPU'
        }
        hetero_launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path1',
            'weights': '/absolute_path1',
            'adapter': 'classification',
            'device': 'HETERO:CPU,GPU'
        }
        gpu_launcher = {
            'framework': 'caffe',
            'model': '/absolute_path2',
            'weights': '/absolute_path2',
            'adapter': 'classification',
            'device': 'GPU'
        }
        config = {'models': [{'name': 'name', 'launchers': [cpu_launcher, hetero_launcher, gpu_launcher]}]}
        self.arguments.target_devices = ['GPU', 'CPU']

        ConfigReader._filter_launchers(config, self.arguments)

        # The HETERO launcher does not exactly match either requested device.
        assert config['models'][0]['launchers'] == [cpu_launcher, gpu_launcher]
Exemplo n.º 2
0
    def test_expand_relative_paths_in_launchers_config_using_command_line(self):
        """Relative launcher paths are resolved against the matching CLI prefix directories."""
        launcher = {
            'framework': 'dlsdk',
            'model': 'relative_model_path',
            'weights': 'relative_weights_path',
            'cpu_extensions': 'relative_extensions_path',
            'gpu_extensions': 'relative_extensions_path',
            'caffe_model': 'relative_model_path',
            'caffe_weights': 'relative_weights_path',
            'tf_model': 'relative_model_path',
            'mxnet_weights': 'relative_weights_path',
            'bitstream': 'relative_bitstreams_path'
        }
        local_config = {'models': [{'name': 'model', 'launchers': [launcher]}]}

        models_prefix = self.arguments.models
        expected = copy.deepcopy(launcher)
        for key in ('model', 'caffe_model', 'tf_model'):
            expected[key] = models_prefix / 'relative_model_path'
        for key in ('weights', 'caffe_weights', 'mxnet_weights'):
            expected[key] = models_prefix / 'relative_weights_path'
        for key in ('cpu_extensions', 'gpu_extensions'):
            expected[key] = self.arguments.extensions / 'relative_extensions_path'
        expected['bitstream'] = self.arguments.bitstreams / 'relative_bitstreams_path'

        ConfigReader._merge_paths_with_prefixes(self.arguments, local_config)

        assert local_config['models'][0]['launchers'][0] == expected
Exemplo n.º 3
0
    def test_both_launchers_are_filtered_by_other_devices(self):
        """Every launcher is removed (with a warning) when no target device matches."""
        launchers = [
            {
                'framework': framework,
                'model': model_path,
                'weights': model_path,
                'adapter': 'classification',
                'device': 'CPU'
            }
            for framework, model_path in (('dlsdk', '/absolute_path1'), ('caffe', '/absolute_path2'))
        ]
        config = {'models': [{'name': 'name', 'launchers': launchers}]}
        self.arguments.target_devices = ['FPGA', 'MYRIAD']

        with pytest.warns(Warning):
            ConfigReader._filter_launchers(config, self.arguments)

        assert config['models'][0]['launchers'] == []
Exemplo n.º 4
0
    def test_launcher_is_not_filtered_by_device_with_tail(self):
        """A target device with a trailing garbage suffix must not match a launcher."""
        cpu_launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path1',
            'weights': '/absolute_path1',
            'adapter': 'classification',
            'device': 'CPU'
        }
        gpu_launcher = {
            'framework': 'caffe',
            'model': '/absolute_path2',
            'weights': '/absolute_path2',
            'adapter': 'classification',
            'device': 'GPU'
        }
        config = {'models': [{'name': 'name', 'launchers': [cpu_launcher, gpu_launcher]}]}
        self.arguments.target_devices = ['CPU', 'GPUunexepectedtail']

        ConfigReader._filter_launchers(config, self.arguments)

        # Only the exact 'CPU' match survives; 'GPUunexepectedtail' matches nothing.
        assert config['models'][0]['launchers'] == [cpu_launcher]
Exemplo n.º 5
0
    def test_expand_relative_paths_in_datasets_config_using_command_line(self):
        """Relative dataset paths get the annotation/source CLI prefixes prepended."""
        dataset = {
            'name': 'global_dataset',
            'dataset_meta': 'relative_annotation_path',
            'data_source': 'relative_source_path',
            'segmentation_masks_source': 'relative_source_path',
            'annotation': 'relative_annotation_path'
        }
        local_config = {'models': [{'name': 'model', 'datasets': [dataset]}]}

        root = self.arguments.root
        expected_dataset = copy.deepcopy(dataset)
        for key in ('annotation', 'dataset_meta'):
            expected_dataset[key] = self.merge(root, self.arguments.annotations, 'relative_annotation_path')
        for key in ('segmentation_masks_source', 'data_source'):
            expected_dataset[key] = self.merge(root, self.arguments.source, 'relative_source_path')

        ConfigReader._merge_paths_with_prefixes(self.arguments, local_config)

        assert local_config['models'][0]['datasets'][0] == expected_dataset
Exemplo n.º 6
0
    def test_only_appropriate_launcher_is_filtered_by_another_framework(self):
        """Only the launcher matching --target_framework is kept."""
        dlsdk_launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path1',
            'weights': '/absolute_path1',
            'adapter': 'classification',
            'device': 'CPU'
        }
        caffe_launcher = {
            'framework': 'caffe',
            'model': '/absolute_path2',
            'weights': '/absolute_path2',
            'adapter': 'classification',
            'device': 'GPU'
        }
        config = {'models': [{'name': 'name', 'launchers': [dlsdk_launcher, caffe_launcher]}]}
        self.arguments.target_framework = 'caffe'

        ConfigReader._filter_launchers(config, self.arguments)

        assert config['models'][0]['launchers'] == [caffe_launcher]
Exemplo n.º 7
0
    def test_empty_local_config_raises_value_error_exception(self, mocker):
        """merge() must fail with 'Missing local config' when the local config is empty."""
        mocker.patch(self.module + '._read_configs', return_value=(self.global_config, {}))

        with pytest.raises(ValueError) as exception:
            ConfigReader.merge(self.arguments)

        # The message is the last ': '-separated segment of the exception text.
        assert str(exception).split(sep=': ')[-1] == 'Missing local config'
Exemplo n.º 8
0
    def test_invalid_model_raises_value_error_exception(self, mocker):
        """A model entry with empty required fields must trigger a ConfigError."""
        broken_model = {'name': None, 'launchers': None, 'datasets': None}
        mocker.patch(
            self.module + '._read_configs',
            return_value=(self.global_config, {'models': [broken_model]})
        )

        with pytest.raises(ConfigError) as exception:
            ConfigReader.merge(self.arguments)

        expected_message = 'Each model must specify {}'.format(['name', 'launchers', 'datasets'])
        assert str(exception).split(sep=': ')[-1] == expected_message
Exemplo n.º 9
0
    def test_empty_models_in_local_config_raises_value_error_exception(self, mocker):
        """An empty 'models' list in the local config must trigger a ConfigError."""
        mocker.patch(
            self.module + '._read_configs',
            return_value=(self.global_config, {'models': []})
        )

        with pytest.raises(ConfigError) as exception:
            ConfigReader.merge(self.arguments)

        expected_message = 'Missed "{}" in local config'.format('models')
        assert str(exception).split(sep=': ')[-1] == expected_message
Exemplo n.º 10
0
    def test_launcher_is_filtered_by_another_framework(self):
        """A launcher from a different framework is dropped, with a warning."""
        launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path',
            'weights': '/absolute_path',
            'adapter': 'classification'
        }
        config = {'models': [{'name': 'name', 'launchers': [launcher]}]}
        self.arguments.target_framework = 'caffe'

        with pytest.warns(Warning):
            ConfigReader._filter_launchers(config, self.arguments)

        assert config['models'][0]['launchers'] == []
Exemplo n.º 11
0
    def test_launcher_is_not_filtered_by_the_same_device(self):
        """A launcher whose device is among the targets is left untouched."""
        launcher = {
            'framework': 'dlsdk',
            'model': '/absolute_path1',
            'weights': '/absolute_path1',
            'adapter': 'classification',
            'device': 'CPU'
        }
        config = {'models': [{'name': 'name', 'launchers': [launcher]}]}
        self.arguments.target_devices = ['CPU']

        ConfigReader._filter_launchers(config, self.arguments)

        assert config['models'][0]['launchers'] == [launcher]
Exemplo n.º 12
0
    def test_launcher_is_not_filtered_by_device_with_tail(self, mocker):
        """merge() keeps only the launcher whose device exactly matches a target."""
        cpu_launcher = {
            'framework': 'dlsdk',
            'model': Path('/absolute_path1'),
            'weights': Path('/absolute_path1'),
            'adapter': 'classification',
            'device': 'CPU',
            '_model_optimizer': self.arguments.model_optimizer,
            '_models_prefix': self.arguments.models
        }
        gpu_launcher = {
            'framework': 'caffe',
            'model': Path('/absolute_path2'),
            'weights': Path('/absolute_path2'),
            'adapter': 'classification',
            'device': 'GPU'
        }
        local_config = {'models': [{
            'name': 'name',
            'launchers': [cpu_launcher, gpu_launcher],
            'datasets': [{'name': 'dataset'}]
        }]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        args = copy.deepcopy(self.arguments)
        args.converted_models = None
        args.target_devices = ['CPU', 'GPU_unexpected_tail']

        config = ConfigReader.merge(args)

        # 'GPU_unexpected_tail' is not a real device name, so the GPU launcher goes away.
        assert config['models'][0]['launchers'] == [cpu_launcher]
Exemplo n.º 13
0
    def test_launcher_with_several_tags_contained_at_least_one_from_target_tegs_is_not_filtered(self, mocker):
        """A launcher survives tag filtering when any of its tags is in --target_tags."""
        launcher = {
            'framework': 'dlsdk',
            'tags': ['tag1', 'tag2'],
            'model': Path('/absolute_path1'),
            'weights': Path('/absolute_path1'),
            'adapter': 'classification',
            'device': 'CPU',
            '_model_optimizer': self.arguments.model_optimizer,
            '_models_prefix': self.arguments.models
        }
        local_config = {'models': [{'name': 'name', 'launchers': [launcher], 'datasets': [{'name': 'dataset'}]}]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        args = copy.deepcopy(self.arguments)
        args.model_optimizer = None
        args.converted_models = None
        args.target_tags = ['tag2']

        config = ConfigReader.merge(args)

        assert config['models'][0]['launchers'] == [launcher]
Exemplo n.º 14
0
    def test_both_launchers_are_filtered_by_target_tags_if_tags_not_provided_in_config(self, mocker):
        """Untagged launchers are all dropped (with a warning) when --target_tags is set."""
        launchers = [
            {
                'framework': 'dlsdk',
                'model': model_path,
                'weights': model_path,
                'adapter': 'classification',
                'device': device,
            }
            for model_path, device in (('/absolute_path1', 'CPU'), ('/absolute_path2', 'GPU'))
        ]
        local_config = {'models': [{'name': 'name', 'launchers': launchers, 'datasets': [{'name': 'dataset'}]}]}
        self.arguments.target_tags = ['some_tag']

        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))

        with pytest.warns(Warning):
            config = ConfigReader.merge(self.arguments)

        assert config['models'][0]['launchers'] == []
Exemplo n.º 15
0
    def test_only_appropriate_launcher_is_filtered_by_another_framework(self, mocker):
        """merge() keeps only the launcher matching --target_framework."""
        dlsdk_launcher = {
            'framework': 'dlsdk',
            'model': Path('/absolute_path1'),
            'weights': Path('/absolute_path1'),
            'adapter': 'classification',
            'device': 'CPU',
            '_model_optimizer': self.arguments.model_optimizer,
            '_models_prefix': self.arguments.models
        }
        caffe_launcher = {
            'framework': 'caffe',
            'model': Path('/absolute_path2'),
            'weights': Path('/absolute_path2'),
            'adapter': 'classification',
            'device': 'GPU'
        }
        local_config = {'models': [{
            'name': 'name',
            'launchers': [dlsdk_launcher, caffe_launcher],
            'datasets': [{'name': 'dataset'}]
        }]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        self.arguments.target_framework = 'caffe'

        config = ConfigReader.merge(self.arguments)

        assert config['models'][0]['launchers'] == [caffe_launcher]
Exemplo n.º 16
0
    def test_both_launchers_are_not_filtered_by_the_same_framework(self, mocker):
        """No launcher is dropped when all of them match --target_framework."""
        launchers = [
            {
                'framework': 'dlsdk',
                'model': Path(model_path),
                'weights': Path(model_path),
                'adapter': 'classification',
                'device': device,
                '_model_optimizer': self.arguments.model_optimizer,
                '_models_prefix': self.arguments.models
            }
            for model_path, device in (('/absolute_path1', 'CPU'), ('/absolute_path2', 'GPU'))
        ]
        local_config = {'models': [{'name': 'name', 'launchers': launchers, 'datasets': [{'name': 'dataset'}]}]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        args = copy.deepcopy(self.arguments)
        args.model_optimizer = None
        args.converted_models = None
        args.target_framework = 'dlsdk'

        config = ConfigReader.merge(args)

        assert config['models'][0]['launchers'] == launchers
Exemplo n.º 17
0
    def test_expand_relative_paths_in_datasets_config_using_command_line(self, mocker):
        """merge() expands relative dataset paths with the CLI prefix directories."""
        dataset = {
            'name': 'global_dataset',
            'dataset_meta': 'relative_annotation_path',
            'data_source': 'relative_source_path',
            'segmentation_masks_source': 'relative_source_path',
            'annotation': 'relative_annotation_path'
        }
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'caffe'}],
            'datasets': [dataset]
        }]}

        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        expected = copy.deepcopy(dataset)
        for key in ('annotation', 'dataset_meta'):
            expected[key] = self.arguments.annotations / 'relative_annotation_path'
        for key in ('segmentation_masks_source', 'data_source'):
            expected[key] = self.arguments.source / 'relative_source_path'

        config = ConfigReader.merge(self.arguments)

        assert config['models'][0]['datasets'][0] == expected
Exemplo n.º 18
0
    def test_both_launchers_are_filtered_by_other_devices(self, mocker):
        """merge() removes every launcher (with a warning) when no device matches."""
        launchers = [
            {
                'framework': framework,
                'model': model_path,
                'weights': model_path,
                'adapter': 'classification',
                'device': 'CPU',
            }
            for framework, model_path in (('dlsdk', '/absolute_path1'), ('caffe', '/absolute_path2'))
        ]
        local_config = {'models': [{'name': 'name', 'launchers': launchers, 'datasets': [{'name': 'dataset'}]}]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        self.arguments.target_devices = ['FPGA', 'MYRIAD']

        with pytest.warns(Warning):
            config = ConfigReader.merge(self.arguments)

        assert config['models'][0]['launchers'] == []
Exemplo n.º 19
0
    def test_both_launchers_are_filtered_by_another_tag(self, mocker):
        """All launchers are dropped (with a warning) when none of their tags is targeted."""
        launchers = [
            {
                'framework': 'dlsdk',
                'tags': ['some_tag'],
                'model': model_path,
                'weights': model_path,
                'adapter': 'classification',
                'device': device,
                '_model_optimizer': self.arguments.model_optimizer,
                '_models_prefix': self.arguments.models
            }
            for model_path, device in (('/absolute_path1', 'CPU'), ('/absolute_path2', 'GPU'))
        ]
        local_config = {'models': [{'name': 'name', 'launchers': launchers, 'datasets': [{'name': 'dataset'}]}]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        args = copy.deepcopy(self.arguments)
        args.model_optimizer = None
        args.converted_models = None
        args.target_tags = ['other_tag']

        with pytest.warns(Warning):
            config = ConfigReader.merge(args)

        assert config['models'][0]['launchers'] == []
Exemplo n.º 20
0
    def test_merge_datasets_with_definitions(self):
        """A dataset referenced only by name is filled in from the global definitions."""
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': '/absolute_path', 'weights': '/absolute_path'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}

        merged = ConfigReader._merge_configs(self.global_config, local_config)

        assert merged['models'][0]['datasets'][0] == self.global_datasets[0]
Exemplo n.º 21
0
    def test_merge_launchers_with_definitions(self):
        """A launcher referenced only by framework is filled in from the global definitions."""
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}

        merged = ConfigReader._merge_configs(self.global_config, local_config)

        assert merged['models'][0]['launchers'][0] == self.get_global_launcher('dlsdk')
Exemplo n.º 22
0
    def test_not_modify_absolute_paths_in_datasets_config_using_command_line(self):
        """Absolute dataset paths become Path objects but get no prefix prepended."""
        dataset = {
            'name': 'global_dataset',
            'dataset_meta': '/absolute_annotation_meta_path',
            'data_source': '/absolute_source_path',
            'annotation': '/absolute_annotation_path',
        }
        local_config = {'models': [{'name': 'model', 'datasets': [dataset]}]}

        expected = copy.deepcopy(dataset)
        expected.update(
            annotation=Path('/absolute_annotation_path'),
            dataset_meta=Path('/absolute_annotation_meta_path'),
            data_source=Path('/absolute_source_path')
        )

        ConfigReader._merge_paths_with_prefixes(self.arguments, local_config)

        assert local_config['models'][0]['datasets'][0] == expected
Exemplo n.º 23
0
    def test_merge_launchers_with_model_is_not_modified(self):
        """Merging with definitions keeps the locally overridden 'model' value.

        The expected launcher is deep-copied before mutation: mutating the object
        returned by ``get_global_launcher`` directly risks polluting the shared
        global launcher fixture for other tests (the mocker variant of this test
        already deep-copies it for the same reason), and can make the assertion
        pass trivially if the merged launcher aliases the definition object.
        """
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': 'custom'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}
        expected = copy.deepcopy(self.get_global_launcher('dlsdk'))
        expected['model'] = 'custom'

        config = ConfigReader._merge_configs(self.global_config, local_config)

        assert config['models'][0]['launchers'][0] == expected
Exemplo n.º 24
0
    def test_merge_datasets_with_definitions_and_meta_is_not_modified(self):
        """Merging keeps the locally overridden 'dataset_meta' value.

        The expected dataset is deep-copied before mutation: assigning into
        ``self.global_datasets[0]`` directly would leak the modified meta path
        into every other test reading the shared fixture, and could make the
        assertion pass trivially if the merged dataset aliases the same object.
        """
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': '/absolute_path', 'weights': '/absolute_path'}],
            'datasets': [{'name': 'global_dataset', 'dataset_meta': '/absolute_path'}]
        }]}
        expected = copy.deepcopy(self.global_datasets[0])
        expected['dataset_meta'] = '/absolute_path'

        config = ConfigReader._merge_configs(self.global_config, local_config)

        assert config['models'][0]['datasets'][0] == expected
Exemplo n.º 25
0
    def test_merge_datasets_with_definitions(self, mocker):
        """merge() fills a name-only dataset from the global definitions."""
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': '/absolute_path', 'weights': '/absolute_path'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}
        mocker.patch(
            self.module + '._read_configs',
            return_value=(self.global_config, local_config)
        )
        arguments = copy.deepcopy(self.arguments)
        arguments.model_optimizer = None

        config = ConfigReader.merge(arguments)

        assert config['models'][0]['datasets'][0] == self.global_datasets[0]
Exemplo n.º 26
0
    def test_merge_datasets_with_definitions_and_meta_is_not_modified(self, mocker):
        """merge() keeps the locally overridden 'dataset_meta' value.

        The expected dataset is deep-copied before mutation: assigning into
        ``self.global_datasets[0]`` directly would leak the modified meta path
        into every other test reading the shared fixture, and could make the
        assertion pass trivially if the merged dataset aliases the same object.
        """
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': '/absolute_path', 'weights': '/absolute_path'}],
            'datasets': [{'name': 'global_dataset', 'dataset_meta': '/absolute_path'}]
        }]}
        expected = copy.deepcopy(self.global_datasets[0])
        expected['dataset_meta'] = Path('/absolute_path')
        mocker.patch(self.module + '._read_configs', return_value=(
            self.global_config, local_config
        ))

        config = ConfigReader.merge(self.arguments)

        assert config['models'][0]['datasets'][0] == expected
Exemplo n.º 27
0
    def test_read_configs_without_global_config(self, mocker):
        """_read_configs returns the local config as-is when no definitions file is given."""
        local = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': '/absolute_path', 'weights': '/absolute_path'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}
        empty_args = Namespace(
            models=None, extensions=None, source=None, annotations=None,
            converted_models=None, model_optimizer=None, bitstreams=None,
            definitions=None, config=None, stored_predictions=None, tf_custom_op_config=None,
            progress='bar', target_framework=None, target_devices=None, log_file=None
        )
        # Stub out the file system and YAML layers so only the parsing flow runs.
        mocker.patch('accuracy_checker.utils.get_path', return_value=Path.cwd())
        mocker.patch('yaml.load', return_value=local)
        mocker.patch('pathlib.Path.open')

        _, local_config = ConfigReader._read_configs(empty_args)

        assert local_config == local
Exemplo n.º 28
0
    def test_merge_launchers_with_model_is_not_modified(self, mocker):
        """merge() keeps a locally overridden 'model' while expanding prefix paths."""
        local_config = {'models': [{
            'name': 'model',
            'launchers': [{'framework': 'dlsdk', 'model': 'custom'}],
            'datasets': [{'name': 'global_dataset'}]
        }]}
        expected = copy.deepcopy(self.get_global_launcher('dlsdk'))
        # Keyword values are evaluated before update(), so the pre-update
        # relative paths are read and then replaced with prefixed ones.
        expected.update(
            model='custom',
            bitstream=self.arguments.bitstreams / expected['bitstream'],
            cpu_extensions=self.arguments.extensions / expected['cpu_extensions']
        )
        mocker.patch(
            self.module + '._read_configs',
            return_value=(self.global_config, local_config)
        )
        args = copy.deepcopy(self.arguments)
        args.model_optimizer = None
        args.models = None
        args.converted_models = None
        config = ConfigReader.merge(args)

        assert config['models'][0]['launchers'][0] == expected
Exemplo n.º 29
0
    def test_only_appropriate_launcher_is_filtered_by_user_input_devices(self, mocker):
        """merge() keeps only launchers whose device exactly matches a target device."""
        cpu_launcher = {
            'framework': 'dlsdk',
            'model': Path('/absolute_path1'),
            'weights': Path('/absolute_path1'),
            'adapter': 'classification',
            'device': 'CPU',
            '_model_optimizer': self.arguments.model_optimizer,
            '_models_prefix': self.arguments.models
        }
        hetero_launcher = {
            'framework': 'dlsdk',
            'model': Path('/absolute_path1'),
            'weights': Path('/absolute_path1'),
            'adapter': 'classification',
            'device': 'HETERO:CPU,GPU',
            '_model_optimizer': self.arguments.model_optimizer,
            '_models_prefix': self.arguments.models
        }
        gpu_launcher = {
            'framework': 'caffe',
            'model': Path('/absolute_path2'),
            'weights': Path('/absolute_path2'),
            'adapter': 'classification',
            'device': 'GPU',
        }
        local_config = {'models': [{
            'name': 'name',
            'launchers': [cpu_launcher, hetero_launcher, gpu_launcher],
            'datasets': [{'name': 'dataset'}]
        }]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))
        args = copy.deepcopy(self.arguments)
        args.converted_models = None
        args.target_devices = ['GPU', 'CPU']

        config = ConfigReader.merge(args)

        # The HETERO launcher does not exactly match either requested device.
        assert config['models'][0]['launchers'] == [cpu_launcher, gpu_launcher]
Exemplo n.º 30
0
    def test_expand_relative_paths_in_launchers_config_using_command_line(self, mocker):
        """merge() resolves relative launcher paths against the CLI prefix directories."""
        launcher = {
            'framework': 'dlsdk',
            'model': 'relative_model_path',
            'weights': 'relative_weights_path',
            'cpu_extensions': 'relative_extensions_path',
            'gpu_extensions': 'relative_extensions_path',
            'caffe_model': 'relative_model_path',
            'caffe_weights': 'relative_weights_path',
            'tf_model': 'relative_model_path',
            'mxnet_weights': 'relative_weights_path',
            'bitstream': 'relative_bitstreams_path'
        }
        local_config = {'models': [{
            'name': 'model',
            'launchers': [launcher],
            'datasets': [{'name': 'dataset'}]
        }]}
        mocker.patch(self.module + '._read_configs', return_value=(None, local_config))

        models_prefix = self.arguments.models
        expected = copy.deepcopy(launcher)
        for key in ('model', 'caffe_model', 'tf_model'):
            expected[key] = models_prefix / 'relative_model_path'
        for key in ('weights', 'caffe_weights', 'mxnet_weights'):
            expected[key] = models_prefix / 'relative_weights_path'
        for key in ('cpu_extensions', 'gpu_extensions'):
            expected[key] = self.arguments.extensions / 'relative_extensions_path'
        expected['bitstream'] = self.arguments.bitstreams / 'relative_bitstreams_path'
        expected['_models_prefix'] = models_prefix
        args = copy.deepcopy(self.arguments)
        args.model_optimizer = None
        args.converted_models = None
        config = ConfigReader.merge(args)

        assert config['models'][0]['launchers'][0] == expected