Example #1
    def test_list_tensors_success(self):
        """Test list_tensors method success."""
        summary_base_dir = tempfile.mkdtemp()
        train_job_01 = 'train_01'
        name_01 = 'train_job_01'
        log_path_01 = os.path.join(summary_base_dir, 'dir1')
        self._make_path_and_file_list(log_path_01)
        modify_time_01 = 1575460551.9777446
        loader_01 = DataLoader(log_path_01)

        ms_loader = MSDataLoader(log_path_01)
        event_data = EventsData()
        mock_obj = mock.MagicMock()
        mock_obj.samples.return_value = {'test result'}
        tag = 'image'
        # Pre-populate the reservoir for this tag so list_tensors returns the mocked samples.
        event_data._reservoir_by_tag = {tag: mock_obj}
        ms_loader._events_data = event_data
        loader_01._loader = ms_loader

        loader = LoaderStruct(loader_id=train_job_01,
                              name=name_01,
                              path=log_path_01,
                              latest_update_time=modify_time_01,
                              data_loader=loader_01)
        loader_pool = {train_job_01: loader}
        d_manager = DataManager([DataLoaderGenerator(summary_base_dir)])
        d_manager._status = DataManagerStatus.LOADING.value
        d_manager._loader_pool = loader_pool

        res = d_manager.list_tensors(train_job_01, tag)
        assert res == {'test result'}

        shutil.rmtree(summary_base_dir)
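The `_make_path_and_file_list` helper called above is defined elsewhere in the test class and is not part of this snippet; a minimal sketch of what such a helper might look like, assuming it only needs to create the log directory with one placeholder summary file inside:

    def _make_path_and_file_list(self, dir_path, file_name='summary.001'):
        """Hypothetical helper: create the log directory and an empty file inside it."""
        os.makedirs(dir_path, exist_ok=True)
        with open(os.path.join(dir_path, file_name), 'w'):
            pass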
Example #2
    def test_list_tensors_with_keyerror(self):
        """Test list_tensors method with parameter tag raises keyerror."""
        summary_base_dir = tempfile.mkdtemp()
        train_job_01 = 'train_01'
        name_01 = 'train_job_01'
        log_path_01 = os.path.join(summary_base_dir, 'dir1')
        self._make_path_and_file_list(log_path_01)
        modify_time_01 = 1575460551.9777446
        ms_loader = MSDataLoader(log_path_01)
        loader_01 = DataLoader(log_path_01)
        loader_01._loader = ms_loader

        loader = LoaderStruct(loader_id=train_job_01,
                              name=name_01,
                              path=log_path_01,
                              latest_update_time=modify_time_01,
                              data_loader=loader_01)
        loader_pool = {train_job_01: loader}
        d_manager = DataManager(summary_base_dir)
        d_manager._status = DataManagerStatus.LOADING.value
        d_manager._detail_cache._loader_pool = loader_pool
        tag = 'image'
        with pytest.raises(ParamValueError):
            d_manager.list_tensors(train_job_01, tag)

        shutil.rmtree(summary_base_dir)
Example #3
    def test_start_load_data_with_invalid_params(self, params):
        """Test start_load_data with invalid reload_interval or invalid max_threads_count."""
        summary_base_dir = tempfile.mkdtemp()
        d_manager = DataManager([DataLoaderGenerator(summary_base_dir)])
        with pytest.raises(ParamValueError):
            d_manager.start_load_data(**params)
        shutil.rmtree(summary_base_dir)
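The `params` argument in Example #3 is normally supplied by a `pytest.mark.parametrize` decorator, which is not shown in this snippet. A minimal sketch with assumed invalid values:

    # Hypothetical parametrization; the actual invalid cases are not included in this snippet.
    @pytest.mark.parametrize('params', [
        {'reload_interval': '30'},    # assumed: wrong type
        {'reload_interval': -1},      # assumed: negative interval
        {'max_threads_count': 0},     # assumed: non-positive thread count
    ])
    def test_start_load_data_with_invalid_params(self, params):
        ...  # body as shown in Example #3 above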
Example #4
    def test_list_tensors_with_not_exist_train_job(self):
        """Test list_tensors method with parameter train_id not found in loader_pool."""
        summary_base_dir = tempfile.mkdtemp()
        d_manager = DataManager([DataLoaderGenerator(summary_base_dir)])
        d_manager._status = DataManagerStatus.LOADING.value
        tag = 'image'
        train_job_01 = 'train_01'
        with pytest.raises(ParamValueError):
            d_manager.list_tensors(train_job_01, tag)
        shutil.rmtree(summary_base_dir)
Example #5
    def test_list_tensors_with_not_exist_train_job(self):
        """Test list_tensors method with parameter train_id not found in loader_pool."""
        summary_base_dir = tempfile.mkdtemp()
        d_manager = DataManager(summary_base_dir)
        d_manager._status = DataManagerStatus.LOADING.value
        tag = 'image'
        train_job_01 = 'train_01'
        with pytest.raises(TrainJobNotExistError) as ex_info:
            d_manager.list_tensors(train_job_01, tag)
        shutil.rmtree(summary_base_dir)
        assert ex_info.value.message == 'Train job is not exist. Detail: Can not find the given train job in cache.'
Example #6
    def setup_class(cls):
        data_manager = DataManager(BASE_SUMMARY_DIR)
        data_manager.register_brief_cache_item_updater(
            LineageCacheItemUpdater())
        data_manager.start_load_data().join()

        cls._data_manger = data_manager
Example #7
    def setup_class(cls):
        data_manager = DataManager(BASE_SUMMARY_DIR)
        data_manager.register_brief_cache_item_updater(
            LineageCacheItemUpdater())
        data_manager.start_load_data(reload_interval=0)
        check_loading_done(data_manager)

        cls._data_manger = data_manager
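`check_loading_done`, used in Examples #7 and #8, is a test utility rather than part of the DataManager API, and its definition is not included here. A minimal sketch of such a polling helper, assuming the private `_status` attribute set in the examples above and a `DONE` member on `DataManagerStatus`:

import time

from mindinsight.datavisual.common.enums import DataManagerStatus  # import path assumed


def check_loading_done(data_manager, first_sleep_time=0, timeout=60):
    """Hypothetical sketch: block until the data manager reports that loading is done."""
    time.sleep(first_sleep_time)
    start = time.time()
    while data_manager._status != DataManagerStatus.DONE.value:
        if time.time() - start > timeout:
            raise TimeoutError('data manager did not finish loading in time')
        time.sleep(0.1)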
Example #8
def init_summary_logs():
    """Init summary logs."""
    try:
        if os.path.exists(summary_base_dir):
            shutil.rmtree(summary_base_dir)
        permissions = os.R_OK | os.W_OK | os.X_OK
        mode = permissions << 6  # owner-only rwx, i.e. 0o700
        if not os.path.exists(summary_base_dir):
            os.mkdir(summary_base_dir, mode=mode)
        global summaries_metadata, mock_data_manager
        log_operations = LogOperations()
        summaries_metadata = log_operations.create_summary_logs(
            summary_base_dir, constants.SUMMARY_DIR_NUM_FIRST,
            constants.SUMMARY_DIR_PREFIX)
        mock_data_manager = DataManager(
            [DataLoaderGenerator(summary_base_dir)])
        mock_data_manager.start_load_data(reload_interval=0)
        check_loading_done(mock_data_manager)

        summaries_metadata.update(
            log_operations.create_summary_logs(
                summary_base_dir, constants.SUMMARY_DIR_NUM_SECOND,
                constants.SUMMARY_DIR_NUM_FIRST))
        summaries_metadata.update(
            log_operations.create_multiple_logs(summary_base_dir,
                                                constants.MULTIPLE_DIR_NAME,
                                                constants.MULTIPLE_LOG_NUM))
        summaries_metadata.update(
            log_operations.create_reservoir_log(summary_base_dir,
                                                constants.RESERVOIR_DIR_NAME,
                                                constants.RESERVOIR_STEP_NUM))
        mock_data_manager.start_load_data(reload_interval=0)

        # Sleep 1 sec to make sure the status of mock_data_manager changed to LOADING.
        check_loading_done(mock_data_manager, first_sleep_time=1)

        # Maximum number of loads is `MAX_DATA_LOADER_SIZE`.
        for i in range(len(summaries_metadata) - MAX_DATA_LOADER_SIZE):
            summaries_metadata.pop("./%s%d" %
                                   (constants.SUMMARY_DIR_PREFIX, i))

        yield
    finally:
        if os.path.exists(summary_base_dir):
            shutil.rmtree(summary_base_dir)
Example #9
def init_summary_logs():
    """Init summary logs."""
    try:
        if os.path.exists(summary_base_dir):
            shutil.rmtree(summary_base_dir)
        permissions = os.R_OK | os.W_OK | os.X_OK
        mode = permissions << 6
        if not os.path.exists(summary_base_dir):
            os.mkdir(summary_base_dir, mode=mode)
        global summaries_metadata, mock_data_manager
        log_operations = LogOperations()
        summaries_metadata = log_operations.create_summary_logs(summary_base_dir, constants.SUMMARY_DIR_NUM_FIRST,
                                                                constants.SUMMARY_DIR_PREFIX)
        mock_data_manager = DataManager(summary_base_dir)
        mock_data_manager.start_load_data().join()

        summaries_metadata.update(
            log_operations.create_summary_logs(summary_base_dir, constants.SUMMARY_DIR_NUM_SECOND,
                                               constants.SUMMARY_DIR_PREFIX,
                                               constants.SUMMARY_DIR_NUM_FIRST))
        summaries_metadata.update(
            log_operations.create_multiple_logs(summary_base_dir, constants.MULTIPLE_DIR_NAME,
                                                constants.MULTIPLE_LOG_NUM))
        summaries_metadata.update(
            log_operations.create_reservoir_log(summary_base_dir, constants.RESERVOIR_DIR_NAME,
                                                constants.RESERVOIR_STEP_NUM))
        mock_data_manager.start_load_data().join()

        # Maximum number of loads is `MAX_DATA_LOADER_SIZE`.
        for i in range(len(summaries_metadata) - MAX_DATA_LOADER_SIZE):
            summaries_metadata.pop("./%s%d" % (constants.SUMMARY_DIR_PREFIX, i))

        yield
    finally:
        if os.path.exists(summary_base_dir):
            shutil.rmtree(summary_base_dir)
Example #10
import os
import shutil
import tempfile
from unittest.mock import Mock

import pytest
from flask import Response

from mindinsight.lineagemgr.cache_item_updater import LineageCacheItemUpdater
from mindinsight.datavisual.data_transform import data_manager
from mindinsight.datavisual.data_transform.data_manager import DataManager
from mindinsight.datavisual.utils import tools

SUMMARY_BASE_DIR = tempfile.NamedTemporaryFile(prefix='test_optimizer_summary_dir_base_').name
MOCK_DATA_MANAGER = DataManager(SUMMARY_BASE_DIR)
MOCK_DATA_MANAGER.register_brief_cache_item_updater(LineageCacheItemUpdater())
MOCK_DATA_MANAGER.start_load_data().join()


@pytest.fixture(scope="session")
def init_summary_logs():
    """Create summary directory."""
    try:
        if os.path.exists(SUMMARY_BASE_DIR):
            shutil.rmtree(SUMMARY_BASE_DIR)
        permissions = os.R_OK | os.W_OK | os.X_OK
        mode = permissions << 6
        if not os.path.exists(SUMMARY_BASE_DIR):
            os.mkdir(SUMMARY_BASE_DIR, mode=mode)
        yield
Example #11
import os
import sys
import tempfile

from mindinsight.datavisual.data_transform.data_manager import DataManager
from mindinsight.lineagemgr.cache_item_updater import LineageCacheItemUpdater

from ....utils import mindspore
from ....utils.mindspore.dataset.engine.serializer_deserializer import SERIALIZED_PIPELINE

sys.modules['mindspore'] = mindspore

BASE_SUMMARY_DIR = tempfile.NamedTemporaryFile(
    prefix='test_lineage_summary_dir_base_').name
SUMMARY_DIR = os.path.join(BASE_SUMMARY_DIR, 'run1')
SUMMARY_DIR_2 = os.path.join(BASE_SUMMARY_DIR, 'run2')
SUMMARY_DIR_3 = os.path.join(BASE_SUMMARY_DIR, 'except_run')

LINEAGE_DATA_MANAGER = DataManager(BASE_SUMMARY_DIR)
LINEAGE_DATA_MANAGER.register_brief_cache_item_updater(
    LineageCacheItemUpdater())

COLLECTION_MODULE = 'TestModelLineage'
API_MODULE = 'TestModelApi'
DATASET_GRAPH = SERIALIZED_PIPELINE


def get_module_name(nodeid):
    """Get the module name from nodeid."""
    _, module_name, _ = nodeid.split("::")
    return module_name


def pytest_collection_modifyitems(items):