Code example #1
    def test_caching(self, mock_generate_loaders):
        """Test caching."""
        # Load summaries the first time.
        job_num = 10
        summary_base_dir = tempfile.NamedTemporaryFile().name
        loader_dict = self._make_loader_dict(summary_base_dir, job_num)
        expected_loader_ids = list(loader_dict.keys())

        mock_generate_loaders.return_value = loader_dict
        generators = [data_manager.DataLoaderGenerator(summary_base_dir)]
        mock_data_manager = data_manager.DataManager(generators)
        mock_data_manager._execute_load_data = Mock()

        mock_data_manager.start_load_data(reload_interval=0)
        check_loading_done(mock_data_manager, 3)
        current_loader_ids = mock_data_manager._loader_pool.keys()

        assert sorted(current_loader_ids) == sorted(expected_loader_ids)

        # Add new summaries.
        new_loader_dict = self._make_loader_dict(summary_base_dir, 6, job_num)
        loader_dict.update(new_loader_dict)
        expected_loader_ids.extend(list(loader_dict.keys()))
        expected_loader_ids = expected_loader_ids[-MAX_DATA_LOADER_SIZE:]

        # Ensure the previous load has finished, then reset the status to INIT so data can be reloaded.
        mock_data_manager._status = DataManagerStatus.INIT
        mock_generate_loaders.return_value = loader_dict
        mock_data_manager.start_load_data(reload_interval=0)
        check_loading_done(mock_data_manager)
        current_loader_ids = mock_data_manager._loader_pool.keys()

        assert sorted(current_loader_ids) == sorted(expected_loader_ids)

        shutil.rmtree(summary_base_dir)
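
The check_loading_done helper used above is not reproduced in these snippets. Below is a minimal sketch of what such a polling helper could look like, assuming the manager's _status attribute (assigned directly above when the test resets it to DataManagerStatus.INIT) reaches DataManagerStatus.DONE once loading completes; the signature and import path are assumptions, not the test suite's actual helper.

import time

# Import path assumed; the snippets above use DataManagerStatus as a bare name.
from mindinsight.datavisual.data_transform.data_manager import DataManagerStatus


def check_loading_done(manager, time_limit=10, interval=0.1):
    """Poll the manager until loading reports DONE or the time limit expires (sketch only)."""
    deadline = time.time() + time_limit
    while time.time() < deadline:
        # Assumption: _status holds a DataManagerStatus member, matching the reset above.
        if manager._status == DataManagerStatus.DONE:
            return True
        time.sleep(interval)
    return False
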
Code example #2
    def load_data(self):
        """Load data."""
        log_operation = LogOperations()
        self._plugins_id_map = {'image': [], 'scalar': [], 'graph': [], 'histogram': [], 'tensor': []}
        self._events_names = []
        self._train_id_list = []

        self._root_dir = tempfile.mkdtemp()
        for i in range(self._dir_num):
            dir_path = tempfile.mkdtemp(dir=self._root_dir)
            tmp_tag_name = self._tag_name + '_' + str(i)
            event_name = str(i) + "_name"
            train_id = dir_path.replace(self._root_dir, ".")

            # Pass the same timestamp so every plugin writes into the same summary file.
            log_settings = dict(steps=self._steps_list, tag=tmp_tag_name, time=time.time())
            if i % 3 != 0:
                log_operation.generate_log(PluginNameEnum.IMAGE.value, dir_path, log_settings)
                self._plugins_id_map['image'].append(train_id)
            if i % 3 != 1:
                log_operation.generate_log(PluginNameEnum.SCALAR.value, dir_path, log_settings)
                self._plugins_id_map['scalar'].append(train_id)
            if i % 3 != 2:
                log_operation.generate_log(PluginNameEnum.GRAPH.value, dir_path, log_settings)
                self._plugins_id_map['graph'].append(train_id)
            self._events_names.append(event_name)
            self._train_id_list.append(train_id)

        self._generated_path.append(self._root_dir)

        self._mock_data_manager = data_manager.DataManager(self._root_dir)
        self._mock_data_manager.start_load_data().join()
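
As a side note, the i % 3 conditions above give every generated run exactly two of the three plugin types. The short standalone loop below (illustration only, not part of the test code) prints the resulting assignment:

# Which plugins each run index receives under the i % 3 scheme used above.
for i in range(6):
    plugins = [name for name, wanted in (('image', i % 3 != 0),
                                         ('scalar', i % 3 != 1),
                                         ('graph', i % 3 != 2)) if wanted]
    print(i, plugins)
# 0 ['scalar', 'graph']
# 1 ['image', 'graph']
# 2 ['image', 'scalar']
# 3 ['scalar', 'graph'], and so on.
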
Code example #3
    def test_caching(self, mock_generate_loaders):
        """Test caching."""
        # Load summaries the first time.
        job_num = 10
        summary_base_dir = tempfile.NamedTemporaryFile().name
        loader_dict = self._make_loader_dict(summary_base_dir, job_num)
        expected_loader_ids = list(loader_dict.keys())

        mock_generate_loaders.return_value = loader_dict
        mock_data_manager = data_manager.DataManager(summary_base_dir)
        mock_data_manager._detail_cache._execute_loader = Mock()

        mock_data_manager.start_load_data().join()
        current_loader_ids = mock_data_manager._detail_cache._loader_pool.keys()

        assert sorted(current_loader_ids) == sorted(expected_loader_ids)

        # Add new summaries.
        new_loader_dict = self._make_loader_dict(summary_base_dir, 6, job_num)
        loader_dict.update(new_loader_dict)
        expected_loader_ids.extend(list(loader_dict.keys()))
        expected_loader_ids = expected_loader_ids[-MAX_DATA_LOADER_SIZE:]

        mock_generate_loaders.return_value = loader_dict
        mock_data_manager.start_load_data().join()
        current_loader_ids = mock_data_manager._detail_cache._loader_pool.keys()

        assert sorted(current_loader_ids) == sorted(expected_loader_ids)

        shutil.rmtree(summary_base_dir)
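
Both caching tests rely on a _make_loader_dict helper defined elsewhere on the test class. Judging only from the call sites (a base directory, a count, and an optional starting index), a rough sketch might look like the plain function below; the loader id format and the MagicMock stand-ins are assumptions, not the project's actual helper.

import os
from unittest.mock import MagicMock


def make_loader_dict(summary_base_dir, loader_num, start_index=0):
    """Return a dict of mock loaders keyed by loader id (sketch only)."""
    loader_dict = {}
    for index in range(start_index, start_index + loader_num):
        loader_id = './job{}'.format(index)
        loader = MagicMock()
        loader.loader_id = loader_id
        loader.summary_dir = os.path.join(summary_base_dir, 'job{}'.format(index))
        loader_dict[loader_id] = loader
    return loader_dict
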
Code example #4
    def _init_data_manager(self, steps_list):
        """
        Generate log and init data_manager.

        Args:
            steps_list (list): Init steps.

        """
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)

        self._train_id = log_dir.replace(summary_base_dir, ".")

        self._temp_path, self._images_metadata, self._images_values = LogOperations.generate_log(
            PluginNameEnum.IMAGE.value, log_dir,
            dict(steps=steps_list, tag=self._tag_name))

        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(
            [DataLoaderGenerator(summary_base_dir)])
        self._mock_data_manager.start_load_data(reload_interval=0)

        # Wait until loading is done.
        check_loading_done(self._mock_data_manager, time_limit=5)
Code example #5
    def test_get_flattened_lineage(self):
        """Test the function of get_flattened_lineage"""
        datamanager = data_manager.DataManager(SUMMARY_DIR)
        datamanager.register_brief_cache_item_updater(
            LineageCacheItemUpdater())
        datamanager.start_load_data().join()

        data = get_flattened_lineage(datamanager)
        assert data.get('[U]info') == ['info1']
Code example #6
    def load_graph_record(self):
        """Load graph record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, self._graph_dict, _ = log_operation.generate_log(PluginNameEnum.GRAPH.value, log_dir)
        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data().join()
Code example #7
    def load_tensor_record(self):
        """Load tensor record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, self._tensors, _ = log_operation.generate_log(
            PluginNameEnum.TENSOR.value, log_dir, dict(step=self._steps_list, tag=self._tag_name))
        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data().join()
Code example #8
    def load_no_graph_record(self):
        """Load no graph record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, _, _ = log_operation.generate_log(PluginNameEnum.IMAGE.value, log_dir,
                                                           dict(steps=self._steps_list, tag="image"))

        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data().join()
Code example #9
    def load_scalar_record(self):
        """Load scalar record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, self._scalars_metadata, self._scalars_values = log_operation.generate_log(
            PluginNameEnum.SCALAR.value, log_dir, dict(step=self._steps_list, tag=self._tag_name))
        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data(reload_interval=0)

        # Wait until loading is done.
        check_loading_done(self._mock_data_manager, time_limit=5)
Code example #10
    def test_start_load_data_success(self):
        """Test start_load_data method success."""
        summary_base_dir = tempfile.mkdtemp()
        dir_num = 3
        train_ids = []
        for i in range(dir_num):
            log_path = os.path.join(summary_base_dir, f'dir{i}')
            self._make_path_and_file_list(log_path)
            train_ids.append(f'./dir{i}')

        data_manager.logger = MockLogger
        mock_manager = data_manager.DataManager(summary_base_dir)
        mock_manager.start_load_data().join()

        assert MockLogger.log_msg['info'] == "Load brief data end, and loader pool size is '3'."
        shutil.rmtree(summary_base_dir)
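
The assertion above reads MockLogger.log_msg['info'] after data_manager.logger has been replaced, so MockLogger is presumably a stub that records the most recent message per log level. A minimal sketch of such a stub is shown below, with %-style argument formatting assumed; the suite's actual mock may differ.

class MockLogger:
    """Logger stub that remembers the most recent message for each level (sketch only)."""
    log_msg = {'error': None, 'warning': None, 'info': None, 'debug': None}

    @classmethod
    def error(cls, msg, *args):
        cls.log_msg['error'] = msg % args if args else msg

    @classmethod
    def warning(cls, msg, *args):
        cls.log_msg['warning'] = msg % args if args else msg

    @classmethod
    def info(cls, msg, *args):
        cls.log_msg['info'] = msg % args if args else msg

    @classmethod
    def debug(cls, msg, *args):
        cls.log_msg['debug'] = msg % args if args else msg
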
Code example #11
    def load_graph_record(self):
        """Load graph record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, self._graph_dict, _ = log_operation.generate_log(
            PluginNameEnum.GRAPH.value, log_dir)
        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data(reload_interval=0)

        # Wait until loading is done.
        check_loading_done(self._mock_data_manager, time_limit=5)
Code example #12
    def load_data(self):
        """Load data."""
        self._plugins_id_map = {'image': [], 'scalar': [], 'graph': []}
        self._events_names = []
        self._train_id_list = []

        graph_base_path = os.path.join(os.path.dirname(__file__), os.pardir,
                                       "utils", "log_generators",
                                       "graph_base.json")

        self._root_dir = tempfile.mkdtemp()
        for i in range(self._dir_num):
            dir_path = tempfile.mkdtemp(dir=self._root_dir)

            tmp_tag_name = self._tag_name + '_' + str(i)
            event_name = str(i) + "_name"
            train_id = dir_path.replace(self._root_dir, ".")

            # Pass the same timestamp so every plugin writes into the same summary file.
            log_settings = dict(steps=self._steps_list,
                                tag=tmp_tag_name,
                                graph_base_path=graph_base_path,
                                time=time.time())

            if i % 3 != 0:
                LogOperations.generate_log(PluginNameEnum.IMAGE.value,
                                           dir_path, log_settings)
                self._plugins_id_map['image'].append(train_id)
            if i % 3 != 1:
                LogOperations.generate_log(PluginNameEnum.SCALAR.value,
                                           dir_path, log_settings)
                self._plugins_id_map['scalar'].append(train_id)
            if i % 3 != 2:
                LogOperations.generate_log(PluginNameEnum.GRAPH.value,
                                           dir_path, log_settings)
                self._plugins_id_map['graph'].append(train_id)
            self._events_names.append(event_name)

            self._train_id_list.append(train_id)

        self._generated_path.append(self._root_dir)

        self._mock_data_manager = data_manager.DataManager(
            [DataLoaderGenerator(self._root_dir)])
        self._mock_data_manager.start_load_data(reload_interval=0)

        check_loading_done(self._mock_data_manager, time_limit=30)
Code example #13
    def load_no_graph_record(self):
        """Load no graph record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        self._temp_path, _, _ = LogOperations.generate_log(
            PluginNameEnum.IMAGE.value, log_dir,
            dict(steps=self._steps_list, tag="image"))

        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(
            [DataLoaderGenerator(summary_base_dir)])
        self._mock_data_manager.start_load_data(reload_interval=0)

        # Wait until loading is done.
        check_loading_done(self._mock_data_manager, time_limit=5)
Code example #14
    def test_start_load_data_success(self):
        """Test start_load_data method success."""
        summary_base_dir = tempfile.mkdtemp()
        train_ids = []
        for i in range(3):
            log_path = os.path.join(summary_base_dir, f'dir{i}')
            self._make_path_and_file_list(log_path)
            train_ids.append(f'./dir{i}')

        data_manager.logger = MockLogger
        mock_manager = data_manager.DataManager(summary_base_dir)
        mock_manager.start_load_data(reload_interval=0)

        check_loading_done(mock_manager)

        assert MockLogger.log_msg['info'] == "Load event data end, status: 'DONE', " \
                                             "and loader pool size is '3'."
        shutil.rmtree(summary_base_dir)
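
Both versions of test_start_load_data_success create their run directories through a _make_path_and_file_list helper on the test class. A plausible sketch is given below as a plain function, assuming the loaders only need each directory to exist and to contain a file whose name looks like a summary file; the exact file-name pattern is an assumption.

import os


def make_path_and_file_list(log_path):
    """Create a log directory containing one summary-like file (sketch only)."""
    os.makedirs(log_path, exist_ok=True)
    # File name assumed to match whatever pattern the directory scanner looks for.
    file_path = os.path.join(log_path, 'test.summary.1600000000')
    with open(file_path, 'w'):
        pass
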
Code example #15
    def _init_data_manager(self, steps_list):
        """
        Generate log and init data_manager.

        Args:
            steps_list (list): Init steps.

        """
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)
        self._train_id = log_dir.replace(summary_base_dir, ".")

        log_operation = LogOperations()
        self._temp_path, self._images_metadata, self._images_values = log_operation.generate_log(
            PluginNameEnum.IMAGE.value, log_dir, dict(steps=steps_list, tag=self._tag_name))
        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(summary_base_dir)
        self._mock_data_manager.start_load_data().join()
Code example #16
    def load_graph_record(self):
        """Load graph record."""
        summary_base_dir = tempfile.mkdtemp()
        log_dir = tempfile.mkdtemp(dir=summary_base_dir)

        self._train_id = log_dir.replace(summary_base_dir, ".")

        graph_base_path = os.path.join(os.path.dirname(__file__), os.pardir,
                                       "utils", "log_generators",
                                       "graph_base.json")
        self._temp_path, self._graph_dict = LogOperations.generate_log(
            PluginNameEnum.GRAPH.value, log_dir,
            dict(graph_base_path=graph_base_path))

        self._generated_path.append(summary_base_dir)

        self._mock_data_manager = data_manager.DataManager(
            [DataLoaderGenerator(summary_base_dir)])
        self._mock_data_manager.start_load_data(reload_interval=0)

        # Wait until loading is done.
        check_loading_done(self._mock_data_manager, time_limit=5)