Example #1
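    # Register dummy reader, processor, and nucleotide classes by name.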
    def setUp(self):
        super(TestDataPipeBuilder, self).setUp()
        test_utils.register_new_class("reader1", nc7.data.DataReader)
        test_utils.register_new_class("reader2", nc7.data.DataReader)
        test_utils.register_new_class("processor1", nc7.data.DataProcessor)
        test_utils.register_new_class("processor2", nc7.data.DataProcessor)
        test_utils.register_new_class("nucleotide1", nc7.core.Nucleotide)
Example #2
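    # The sample config for a registered plugin class should contain
    # TODO placeholders plus all default parameter values.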
    def test_get_sample_config_for_nucleotide(self):
        class_name = 'DummyPluginWithDummyParameter'
        register_new_class(class_name, DummyPluginWithDummyParameter)

        inbound_nodes = ['TODO_SET_CORRECT_INBOUND_NODES']
        incoming_keys_mapping = {
            'TODO_SET_INPUT_PLUGIN_NAME_0': {
                'TODO_SET_INPUT_PLUGIN_OUTPUT_KEY_NAME_0': 'inputs_mlp'
            }
        }
        correct_dict = {
            'inbound_nodes': inbound_nodes,
            'incoming_keys_mapping': incoming_keys_mapping,
            'dummy_parameter': 'TODO_SET_CORRECT_VALUE',
            'name': class_name,
            'activation': 'elu',
            'initializer': None,
            'dropout': 'normal',
            'trainable': True,
            'stop_gradient_from_inputs': False,
            'load_fname': None,
            'load_var_scope': None,
            'exclude_from_restore': False,
            'optimization_configs': None,
            'data_format': None,
            'allow_mixed_precision': True,
            'class_name': class_name
        }

        out_dict = nucleotide_utils.get_sample_config_for_nucleotide(
            class_name)
        keys_out = list(out_dict)
        keys_correct = list(correct_dict)
        self.assertSetEqual(set(keys_correct), set(keys_out))
        self.assertDictEqual(correct_dict, out_dict)
Example #3
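    # Build an instance of a registered class by name; a mismatching base_cls
    # raises ValueError, and default_cls is used when class_name is omitted.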
    def test_build_registered_object(self):
        register_new_class('dummy_plugin', nc7.model.ModelPlugin)
        name = "plugin_name"
        inbound_nodes = ['node1', 'node2']
        plugin = builder_lib.build_registered_object(
            base_cls=nc7.model.ModelPlugin,
            class_name="dummy_plugin",
            name=name,
            inbound_nodes=inbound_nodes)
        self.assertIsInstance(plugin, nc7.model.ModelPlugin)
        self.assertTrue(plugin.built)
        self.assertEqual(plugin.name, name)
        self.assertListEqual(plugin.inbound_nodes, inbound_nodes)

        with self.assertRaises(ValueError):
            plugin2 = builder_lib.build_registered_object(
                base_cls=nc7.model.ModelMetric,
                class_name="dummy_plugin",
                name=name,
                inbound_nodes=inbound_nodes)

        plugin3 = builder_lib.build_registered_object(
            base_cls=nc7.model.ModelMetric,
            default_cls=nc7.model.ModelPlugin,
            inbound_nodes=inbound_nodes)
        self.assertIsInstance(plugin3, nc7.model.ModelPlugin)
        self.assertEqual(plugin3.name, plugin3.__class__.__name__)
Example #4
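    # Build a single FileList from a config that references a registered class.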
    def test_build_single_file_list(self):
        register_new_class('file_list1', FileListDummy)
        config_file_list = {
            'class_name': 'file_list1',
            'file_names': self.file_names1
        }
        file_list = file_list_builder.build(config_file_list)
        self.assertIsInstance(file_list, nc7.data.FileList)
        self.assertEqual(self.number_of_files1, len(file_list))
        self.assertDictEqual(self.file_names1, file_list.get())
Example #5
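    # Register dummy KPI plugins and accumulators and prepare their configs
    # with cachers, savers, and a project-global config.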
    def setUp(self):
        reset_register_and_logger()
        register_new_class("plugin1", _KPIPlugin)
        register_new_class('plugin2', _KPIPlugin)
        register_new_class("accumulator1", _KPIAccumulator)
        register_new_class("accumulator2", _KPIAccumulator)

        self.plugins_and_accumulators_configs = [
            {
                "class_name": "plugin1",
                "parameter1": 10
            },
            {
                "class_name": "plugin2",
                "parameter1": 20,
                "cachers": [{
                    "class_name": "kpi_builder_test._KPICacher"
                }]
            },
            {
                "class_name": "accumulator1",
                "parameter2": 100,
                "cachers": [{
                    "class_name": "kpi_builder_test._KPICacher",
                    "p": 10
                }],
                "savers": [{
                    "class_name": "kpi_builder_test._KPISaver",
                    "p3": 5
                }]
            },
            {
                "class_name": "accumulator2",
                "parameter2": 200,
                "savers": [{
                    "class_name": "kpi_builder_test._KPISaver2",
                    "p4": 50
                }]
            },
        ]
        self.project_global_config = {
            "KPIPlugin": {
                "cachers": [{
                    "class_name": "kpi_builder_test._KPICacher2",
                    "p2": 5
                }],
                "savers": [{
                    "class_name": "kpi_builder_test._KPISaver2",
                    "p4": 100
                }]
            }
        }
        project_global_config.clear_project_global_config()
        super().setUp()
Example #6
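    # A single data filter config or a list of configs should both be built.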
    def test_build_file_list_with_filter(self, single_data_filter):
        register_new_class('file_list1', FileListDummy)

        class _DummyDataFilter1(DataFilter):
            pass

        class _DummyDataFilter2(DataFilter):
            pass

        if single_data_filter:
            data_filter_config = {
                "data_filter": {
                    "class_name": "_DummyDataFilter1"
                }
            }
        else:
            data_filter_config = {
                "data_filter": [{
                    "class_name": "_DummyDataFilter1"
                }, {
                    "class_name": "_DummyDataFilter2"
                }, {
                    "class_name": "_DummyDataFilter2"
                }]
            }

        config_file_list = {
            "class_name": "file_list1",
            "file_names": self.file_names1
        }
        config_file_list.update(data_filter_config)
        file_list = file_list_builder.build(config_file_list)

        number_of_data_filters_must = 1 if single_data_filter else 3
        self.assertEqual(number_of_data_filters_must,
                         len(file_list.data_filters))

        data_filter_types_must = (
            [_DummyDataFilter1] if single_data_filter
            else [_DummyDataFilter1, _DummyDataFilter2, _DummyDataFilter2])
        for each_data_filter, each_data_filter_type_must in zip(
                file_list.data_filters, data_filter_types_must):
            self.assertIsInstance(each_data_filter, each_data_filter_type_must)
Example #7
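    # A list of file list configs is merged into one FileList.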
    def test_build_list_of_file_list(self):
        register_new_class('file_list2', FileListDummy)
        register_new_class('file_list3', FileListDummy)
        config_file_list = [{
            'class_name': 'file_list2',
            'file_names': self.file_names1
        }, {
            'class_name': 'file_list2',
            'file_names': self.file_names2
        }, {
            'class_name': 'file_list3',
            'file_names': self.file_names3
        }]
        file_list = file_list_builder.build(config_file_list)
        self.assertIsInstance(file_list, nc7.data.FileList)
        self.assertEqual(
            self.number_of_files1 + self.number_of_files2 +
            self.number_of_files3, len(file_list))
        file_names_1_2_3 = {}
        for each_key in self.file_names1.keys():
            file_names_1_2_3[each_key] = (self.file_names1[each_key] +
                                          self.file_names2[each_key] +
                                          self.file_names3[each_key])
        self.assertDictEqual(file_names_1_2_3, file_list.get())
Example #8
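    # Build a chain of callbacks from configs and check type and built state.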
    def test_get_callbacks(self):
        register_new_class('dummy_callback_1',
                           nc7.coordinator.CoordinatorCallback)
        register_new_class('dummy_callback_2',
                           nc7.coordinator.CoordinatorCallback)
        config_callbacks = [
            {
                'class_name': 'dummy_callback_1',
                'name': 'callback1'
            },
            {
                'class_name': 'dummy_callback_2',
                'name': 'callback2'
            },
        ]
        callbacks = callback_builder.build_callbacks_chain(config_callbacks)
        self.assertEqual(2, len(callbacks))
        for callback in callbacks:
            self.assertIsInstance(callback,
                                  nc7.coordinator.CoordinatorCallback)
            self.assertTrue(callback.built)
Example #9
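    # Register dummy dataset and file list classes and prepare file fixtures.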
    def setUp(self):
        reset_register_and_logger()
        register_new_class("dataset_with_file_list", _DatasetWithFileList)
        register_new_class("dataset_without_file_list",
                           _DatasetWithoutFileList)
        register_new_class("FileListDummy", FileListDummy)
        self.number_of_files1 = 20
        self.number_of_files2 = 10
        self.file_names1 = {'data': ['input_1_{:03d}.ext'.format(i)
                                     for i in range(self.number_of_files1)]}
        self.file_names2 = {'data': ['input2_1_{:03d}.ext'.format(i)
                                     for i in range(self.number_of_files2)]}
Example #10
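    # Register all dummy classes (datasets, callbacks, model plugins, losses,
    # metrics, data feeders, KPI plugins) used by the tests in this module.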
    def setUp(self):
        reset_register_and_logger()

        register_new_class('dummy_dataset_train', nc7.data.Dataset)
        register_new_class('dummy_dataset_eval', nc7.data.Dataset)
        register_new_class('dummy_callback_train',
                           nc7.coordinator.CoordinatorCallback)
        register_new_class('dummy_plugin_train', nc7.model.ModelPlugin)
        register_new_class('dummy_loss', nc7.model.ModelLoss)
        register_new_class('dummy_pp', nc7.model.ModelPostProcessor)
        register_new_class('dummy_metrics', nc7.model.ModelMetric)
        register_new_class('dummy_summaries', nc7.model.ModelSummary)
        register_new_class('dummy_dataset_fl_train', _DatasetFileListDummy)
        register_new_class('dummy_dataset_fl_eval', _DatasetFileListDummy)
        register_new_class('dummy_file_list', FileListDummy)

        register_new_class('dummy_data_feeder', nc7.data.DataFeeder)
        register_new_class('dummy_callback_infer',
                           nc7.coordinator.CoordinatorCallback)
        register_new_class('dummy_data_feeder_fl', _DatafeederFileListDummy)
        register_new_class('dummy_file_list_infer', FileListDummy)

        register_new_class("dummy_kpi_plugin1", nc7.kpi.KPIPlugin)
        register_new_class("dummy_kpi_plugin2", nc7.kpi.KPIPlugin)
        register_new_class("dummy_kpi_accumulator", nc7.kpi.KPIAccumulator)
Example #11
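    # Register two dummy data filter classes and prepare their configs.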
    def setUp(self) -> None:
        reset_register_and_logger()
        register_new_class("data_filter1", _DummyDataFilter1)
        register_new_class("data_filter2", _DummyDataFilter2)
        self.config1 = {"class_name": "data_filter1", "p1": 20, "p2": 500}
        self.config2 = {"class_name": "data_filter2", "p1": 100}
Example #12
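    # Build a data object from a config with optional file list, data filter,
    # data pipe, and file list key mapping, and verify the built object.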
    def test_build_data_object_from_config(self,
                                           with_file_list,
                                           with_data_filter,
                                           with_data_pipe=False,
                                           with_file_list_mapping=False):
        config_object = {"random_seed": 65477}
        if with_data_pipe:
            built_fn = lambda x: Dataset.from_data_pipe(**x).build()
            dataset_base_cls = Dataset
        else:
            if with_file_list:
                register_new_class("dummy_dataset", _DummyDatasetFileList)
                dataset_base_cls = _DummyDatasetFileList
                built_fn = None
            else:
                register_new_class("dummy_dataset", _DummyDataset)
                dataset_base_cls = _DummyDataset
                built_fn = None

            config_object.update({"class_name": "dummy_dataset", "p1": 100})
        file_list_keys_mapping = {"key1": "key1_r"}
        if with_file_list:
            config_object["file_list"] = {
                "class_name": "file_list1",
                "file_names": {
                    "key1": ["value1"]
                },
                "name": "file_list_name"
            }
            _DummyDatasetFileList.file_list_keys = ["key1"]
            if with_file_list_mapping:
                config_object["file_list_keys_mapping"] = {"key1": "key1_r"}
                _DummyDatasetFileList.file_list_keys = ["key1_r"]
        if with_data_filter:
            config_object["data_filter"] = {
                "class_name": "data_filter1",
                "dp1": 1
            }

        if with_data_pipe:
            reader_config1 = {"class_name": "reader1", "name": "reader1_name"}
            reader_config2 = {"class_name": "reader2", "name": "reader2_name"}
            processor_config1 = {
                "class_name": "processor1",
                "name": "processor1_name"
            }
            processor_config2 = {
                "class_name": "processor2",
                "name": "processor2_name"
            }
            if with_file_list:
                config_object["data_pipe"] = [
                    processor_config1, reader_config1, reader_config2,
                    processor_config2
                ]
            else:
                config_object["data_pipe"] = [
                    processor_config1, processor_config2
                ]
        if with_data_pipe and with_file_list_mapping:
            with self.assertRaises(ValueError):
                data_builder_lib.build_data_object_from_config(
                    config_object,
                    base_cls=dataset_base_cls,
                    built_fn=built_fn)
            return

        built_object = data_builder_lib.build_data_object_from_config(
            config_object, base_cls=dataset_base_cls, built_fn=built_fn)
        self.assertTrue(built_object.built)
        self.assertEqual(65477, built_object.random_seed)

        self.assertIsInstance(built_object, dataset_base_cls)
        if with_data_filter:
            self.assertEqual(1, len(built_object.data_filters))
            data_filter = built_object.data_filters[0]
            self.assertTrue(data_filter.built)
            self.assertIsInstance(data_filter, _DummyDataFilter)
            self.assertEqual(1, data_filter.dp1)
        else:
            self.assertIsNone(built_object.data_filters)

        if with_file_list:
            file_list = built_object.file_list
            self.assertIsInstance(file_list, FileListDummy)
            self.assertTrue(file_list.built)
            self.assertEqual("file_list_name", file_list.name)
            file_names_must = ({
                "key1_r": ["value1"]
            } if with_file_list_mapping else {
                "key1": ["value1"]
            })
            self.assertDictEqual(file_names_must, file_list.get())
            if with_file_list_mapping:
                self.assertDictEqual(file_list_keys_mapping,
                                     built_object.file_list_keys_mapping)
            else:
                self.assertIsNone(built_object.file_list_keys_mapping)
        else:
            self.assertFalse(hasattr(built_object, "file_list"))
            self.assertFalse(hasattr(built_object, "file_list_keys_mapping"))

        if with_data_pipe:
            data_pipe = built_object.data_pipe
            self.assertIsInstance(data_pipe, DataPipe)
            self.assertTrue(data_pipe.built)
            if with_file_list:
                self.assertSetEqual(
                    {"reader1_name", "reader2_name"},
                    {r.name
                     for r in data_pipe.readers.values()})
            else:
                self.assertDictEqual({}, data_pipe.readers)
            self.assertSetEqual(
                {"processor1_name", "processor2_name"},
                {r.name
                 for r in data_pipe.processors.values()})
Example #13
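    # Register dummy data filter, file list, reader, and processor classes.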
    def setUp(self):
        reset_register_and_logger()
        register_new_class("data_filter1", _DummyDataFilter)
        register_new_class("file_list1", FileListDummy)
        register_new_class("reader1", _DataReaderTF)
        register_new_class("reader2", _DataReaderTF)
        register_new_class("processor1", _DataProcessorTF)
        register_new_class("processor2", _DataProcessorTF)