def test_list_tensors_success(self):
    """Test list_tensors returns the samples cached for the given tag."""
    summary_base_dir = tempfile.mkdtemp()
    try:
        train_job_01 = 'train_01'
        name_01 = 'train_job_01'
        log_path_01 = os.path.join(summary_base_dir, 'dir1')
        self._make_path_and_file_list(log_path_01)
        modify_time_01 = 1575460551.9777446
        loader_01 = DataLoader(log_path_01)
        ms_loader = MSDataLoader(log_path_01)

        # Stub the events data so list_tensors hits a pre-canned reservoir
        # instead of parsing real summary files.
        event_data = EventsData()
        mock_obj = mock.MagicMock()
        mock_obj.samples.return_value = {'test result'}
        tag = 'image'
        event_data._reservoir_by_tag = {tag: mock_obj}
        ms_loader._events_data = event_data
        loader_01._loader = ms_loader

        loader = LoaderStruct(loader_id=train_job_01,
                              name=name_01,
                              path=log_path_01,
                              latest_update_time=modify_time_01,
                              data_loader=loader_01)
        loader_pool = {train_job_01: loader}
        d_manager = DataManager([DataLoaderGenerator(summary_base_dir)])
        d_manager._status = DataManagerStatus.LOADING.value
        d_manager._loader_pool = loader_pool

        res = d_manager.list_tensors(train_job_01, tag)
        assert res == {'test result'}
    finally:
        # Clean up even when setup or the assertion raises, so the
        # temporary directory does not leak between test runs.
        shutil.rmtree(summary_base_dir)
def test_list_tensors_with_keyerror(self):
    """Test list_tensors method with parameter tag raises keyerror."""
    summary_base_dir = tempfile.mkdtemp()
    try:
        train_job_01 = 'train_01'
        name_01 = 'train_job_01'
        log_path_01 = os.path.join(summary_base_dir, 'dir1')
        self._make_path_and_file_list(log_path_01)
        modify_time_01 = 1575460551.9777446
        ms_loader = MSDataLoader(log_path_01)
        loader_01 = DataLoader(log_path_01)
        loader_01._loader = ms_loader

        loader = LoaderStruct(loader_id=train_job_01,
                              name=name_01,
                              path=log_path_01,
                              latest_update_time=modify_time_01,
                              data_loader=loader_01)
        loader_pool = {train_job_01: loader}
        d_manager = DataManager([DataLoaderGenerator(summary_base_dir)])
        d_manager._status = DataManagerStatus.LOADING.value
        d_manager._loader_pool = loader_pool

        # No reservoir was registered for this tag, so the lookup must be
        # translated into a ParamValueError for the caller.
        tag = 'image'
        with pytest.raises(ParamValueError):
            d_manager.list_tensors(train_job_01, tag)
    finally:
        # Clean up even when the expected exception is not raised, so the
        # temporary directory does not leak.
        shutil.rmtree(summary_base_dir)
def load(self, executor=None):
    """Load the data when loader is exist.

    Args:
        executor (Optional[Executor]): The executor instance.

    Returns:
        bool, True if the loader is finished loading.

    Raises:
        SummaryLogPathInvalid: If no loader can handle the summary dir.
    """
    if self._loader is None:
        # Pick the first candidate loader that recognizes files in the
        # summary directory; currently MSDataLoader is the only candidate.
        candidates = [MSDataLoader(self._summary_dir)]
        self._loader = next(
            (candidate for candidate in candidates
             if candidate.filter_valid_files()),
            None)
        if self._loader is None:
            logger.warning(
                "No valid files can be loaded, summary_dir: %s.", self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
    return self._loader.load(executor)
def has_valid_files(self):
    """
    Check the directory for valid files.

    Returns:
        bool, if the directory has valid files, return True.
    """
    # Delegate the scan to a throwaway MSDataLoader; a non-empty result
    # means at least one recognizable summary file exists.
    return bool(MSDataLoader(self._summary_dir).filter_valid_files())
def test_load_with_crc_fail(self):
    """Test when crc_fail and will not go to func _event_parse."""
    summary_dir = tempfile.mkdtemp()
    try:
        file2 = os.path.join(summary_dir, 'summary.02')
        write_file(file2, SCALAR_RECORD)
        ms_loader = MSDataLoader(summary_dir)
        ms_loader.load()
        # The corrupted record must be reported via an error log, not parsed.
        assert 'Check crc failed' in str(MockLogger.log_msg['error'])
    finally:
        # Remove the temp dir even when load() or the assertion raises.
        shutil.rmtree(summary_dir)
def test_load_with_crc_fail(self):
    """Test when crc_fail and will not go to func _event_parse."""
    summary_dir = tempfile.mkdtemp()
    try:
        file2 = os.path.join(summary_dir, 'summary.02')
        write_file(file2, SCALAR_RECORD)
        ms_loader = MSDataLoader(summary_dir)
        ms_loader.load(ComputingResourceManager(1, 1))
        # The loader is expected to log a warning (message text matches the
        # production string, including its 'faild' spelling) and skip the file.
        assert 'Check crc faild and ignore this file' in str(
            MockLogger.log_msg['warning'])
    finally:
        # Remove the temp dir even when load() or the assertion raises.
        shutil.rmtree(summary_dir)
def test_check_files_update_success_deleted_files(self):
    """Test new file list delete some files."""
    old_file_list = ['summary.01', 'summary.02']
    new_file_list = ['summary02']
    summary_dir = tempfile.mkdtemp()
    try:
        ms_loader = MSDataLoader(summary_dir)
        ms_loader._check_files_deleted(new_file_list, old_file_list)
        # Deleting files from the directory must trigger a full-reload warning.
        assert MockLogger.log_msg['warning'] == "There are some files has been deleted, " \
            "we will reload all files in path {}.".format(summary_dir)
    finally:
        # Remove the temp dir even when the check or assertion raises.
        shutil.rmtree(summary_dir)
def test_load_success_with_crc_pass(self):
    """Test load success."""
    summary_dir = tempfile.mkdtemp()
    try:
        file1 = os.path.join(summary_dir, 'summary.01')
        write_file(file1, SCALAR_RECORD)
        ms_loader = MSDataLoader(summary_dir)
        ms_loader._latest_summary_filename = 'summary.00'
        ms_loader.load(ComputingResourceManager(1, 1))
        # The scalar record should have been parsed into three tensors.
        tag = ms_loader.get_events_data().list_tags_by_plugin('scalar')
        tensors = ms_loader.get_events_data().tensors(tag[0])
        assert len(tensors) == 3
    finally:
        # Remove the temp dir even when load() or an assertion raises.
        shutil.rmtree(summary_dir)
def test_load_single_pb_file(self):
    """Test load pb file success."""
    filename = 'ms_output.pb'
    summary_dir = tempfile.mkdtemp()
    try:
        create_graph_pb_file(output_dir=summary_dir, filename=filename)
        ms_loader = MSDataLoader(summary_dir)
        ms_loader.load(ComputingResourceManager(1, 1))
        events_data = ms_loader.get_events_data()
        plugins = events_data.list_tags_by_plugin(PluginNameEnum.GRAPH.value)
        # Exactly one graph plugin tag, named after the pb file itself.
        assert len(plugins) == 1
        assert plugins[0] == filename
    finally:
        # Remove the temp dir even when load() or an assertion raises.
        shutil.rmtree(summary_dir)
def test_load_success_with_crc_pass(self, crc_pass):
    """Test load success."""
    summary_dir = tempfile.mkdtemp()
    try:
        file1 = os.path.join(summary_dir, 'summary.01')
        write_file(file1, SCALAR_RECORD)
        ms_loader = MSDataLoader(summary_dir)
        ms_loader._latest_summary_filename = 'summary.00'
        ms_loader.load()
        # The loader should advance to the newly written file and record
        # exactly how many bytes of it were consumed.
        assert ms_loader._latest_summary_filename == 'summary.01'
        assert ms_loader._latest_summary_file_size == RECORD_LEN
        tag = ms_loader.get_events_data().list_tags_by_plugin('scalar')
        tensors = ms_loader.get_events_data().tensors(tag[0])
        assert len(tensors) == 3
    finally:
        # Bug fix: the original test never removed summary_dir, leaking a
        # temp directory on every run. Always clean up.
        shutil.rmtree(summary_dir)
def load(self):
    """Load the data when loader is exist."""
    if self._loader is None:
        # Pick the first candidate loader that recognizes files in the
        # summary directory; currently MSDataLoader is the only candidate.
        candidates = [MSDataLoader(self._summary_dir)]
        self._loader = next(
            (candidate for candidate in candidates
             if candidate.filter_valid_files()),
            None)
        if self._loader is None:
            logger.warning(
                "No valid files can be loaded, summary_dir: %s.", self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
    self._loader.load()
def test_filter_event_files(self):
    """Test filter_event_files function ok."""
    # Mix of valid summary files and near-miss names that must be rejected.
    file_list = [
        'abc.summary', '123sumary0009abc', 'summary1234', 'aaasummary.5678',
        'summary.0012', 'hellosummary.98786', 'mysummary.123abce', 'summay.4567'
    ]
    summary_dir = tempfile.mkdtemp()
    try:
        for file in file_list:
            with open(os.path.join(summary_dir, file), 'w'):
                pass
        ms_loader = MSDataLoader(summary_dir)
        res = ms_loader.filter_valid_files()
        expected = sorted([
            'aaasummary.5678', 'summary.0012', 'hellosummary.98786',
            'mysummary.123abce'
        ])
        assert sorted(res) == expected
    finally:
        # Remove the temp dir even when file creation or filtering raises.
        shutil.rmtree(summary_dir)
def load(self, computing_resource_mgr):
    """Load the data when loader is exist.

    Args:
        computing_resource_mgr (ComputingResourceManager): The ComputingResourceManager instance.

    Raises:
        SummaryLogPathInvalid: If no loader can handle the summary dir.
    """
    if self._loader is None:
        # Pick the first candidate loader that recognizes files in the
        # summary directory; currently MSDataLoader is the only candidate.
        candidates = [MSDataLoader(self._summary_dir)]
        self._loader = next(
            (candidate for candidate in candidates
             if candidate.filter_valid_files()),
            None)
        if self._loader is None:
            logger.warning(
                "No valid files can be loaded, summary_dir: %s.", self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
    self._loader.load(computing_resource_mgr)