Example #1
    def _load(self, executor):
        """
        Load all valid log files.

        When a file is reloaded, loading continues from where it left off.

        Args:
            executor (Executor): The Executor instance.

        Returns:
            bool, True if the train job is finished loading.
        """
        filenames = self.filter_valid_files()
        if not filenames:
            logger.warning("No valid files can be loaded, summary_dir: %s.",
                           self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
        old_filenames = list(self._valid_filenames)
        self._valid_filenames = filenames
        self._check_files_deleted(filenames, old_filenames)

        finished = True
        for parser in self._parser_list:
            finished = parser.parse_files(
                executor, filenames,
                events_data=self._events_data) and finished
        return finished
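A minimal sketch of the aggregation above, using stand-in parsers (the FakeParser class and its return values are hypothetical, not MindInsight code). Writing `parser.parse_files(...) and finished` instead of `finished and parser.parse_files(...)` matters: `and` short-circuits on its left operand, so this ordering forces every parser to run even after an earlier one has reported it is not finished.

    class FakeParser:
        """Stand-in parser; `done` mimics the return value of parse_files."""
        def __init__(self, done):
            self.done = done

        def parse_files(self, executor, filenames, events_data=None):
            print("parsing %s, done: %s" % (filenames, self.done))
            return self.done

    parsers = [FakeParser(True), FakeParser(False), FakeParser(True)]
    finished = True
    for parser in parsers:
        # parse_files is evaluated first, so the third parser still runs
        # even though `finished` is already False after the second one.
        finished = parser.parse_files(None, ["events.out"], events_data=None) and finished
    print(finished)  # False: at least one parser has more work to do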
Example #2
    def load(self, executor=None):
        """Load the data when loader is exist.

        Args:
            executor (Optional[Executor]): The executor instance.

        Returns:
            bool, True if the loader is finished loading.
        """

        if self._loader is None:
            ms_dataloader = MSDataLoader(self._summary_dir)
            loaders = [ms_dataloader]
            for loader in loaders:
                if loader.filter_valid_files():
                    self._loader = loader
                    break

            if self._loader is None:
                logger.warning(
                    "No valid files can be loaded, summary_dir: %s.",
                    self._summary_dir)
                raise exceptions.SummaryLogPathInvalid()

        return self._loader.load(executor)
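The branch above lazily selects the first candidate loader that reports valid files and caches it on self._loader, so subsequent calls skip the scan. A standalone sketch of that selection pattern (CandidateLoader and the file lists are made up for illustration):

    class CandidateLoader:
        def __init__(self, name, files):
            self.name = name
            self._files = files

        def filter_valid_files(self):
            # An empty result means this loader cannot handle the directory.
            return [f for f in self._files if f.endswith(".log")]

    def pick_loader(candidates):
        for loader in candidates:
            if loader.filter_valid_files():
                return loader  # first loader with valid files wins
        raise ValueError("No valid files can be loaded.")

    chosen = pick_loader([CandidateLoader("ms", ["graph.pb"]),
                          CandidateLoader("tf", ["events.log"])])
    print(chosen.name)  # tf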
Example #3
    def load(self):
        """Load the data when loader is exist."""
        if self._loader is None:
            ms_dataloader = MSDataLoader(self._summary_dir)
            loaders = [ms_dataloader]
            for loader in loaders:
                if loader.filter_valid_files():
                    self._loader = loader
                    break

            if self._loader is None:
                logger.warning(
                    "No valid files can be loaded, summary_dir: %s.",
                    self._summary_dir)
                raise exceptions.SummaryLogPathInvalid()

        self._loader.load()
Example #4
    def __init__(self, train_id, data_manager, tag=None):
        Validation.check_param_empty(train_id=train_id)
        super(GraphProcessor, self).__init__(data_manager)

        train_job = self._data_manager.get_train_job_by_plugin(
            train_id, PluginNameEnum.GRAPH.value)
        if train_job is None:
            raise exceptions.SummaryLogPathInvalid()
        if not train_job['tags']:
            raise ParamValueError(
                "Can not find any graph data in the train job.")

        if tag is None:
            tag = train_job['tags'][0]

        tensors = self._data_manager.list_tensors(train_id, tag=tag)
        self._graph = tensors[0].value
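The constructor relies on Validation.check_param_empty to reject a missing train_id before any lookup runs. A minimal sketch of what such a helper might look like (this implementation and the ParamMissError name are assumptions, not MindInsight's actual code):

    class ParamMissError(ValueError):
        """Assumed error type for empty required parameters."""

    def check_param_empty(**params):
        # Assumed behaviour: raise if any named parameter is None or empty.
        for name, value in params.items():
            if value is None or value == "":
                raise ParamMissError("Param '%s' is empty." % name)

    check_param_empty(train_id="./run_1")   # passes silently
    # check_param_empty(train_id="")        # would raise ParamMissError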
Example #5
    def load(self):
        """
        Load all valid log files.

        When a file is reloaded, loading continues from where it left off.
        """
        logger.debug("Start to load data in ms data loader.")
        filenames = self.filter_valid_files()
        if not filenames:
            logger.warning("No valid files can be loaded, summary_dir: %s.", self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
        old_filenames = list(self._valid_filenames)
        self._valid_filenames = filenames
        self._check_files_deleted(filenames, old_filenames)

        for parser in self._parser_list:
            parser.parse_files(filenames, events_data=self._events_data)
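Keeping old_filenames around before overwriting self._valid_filenames is what lets _check_files_deleted detect summaries that disappeared between reloads. A minimal sketch of such a check via set difference (the warning wording is an assumption, not MindInsight's exact message):

    import logging

    logger = logging.getLogger(__name__)

    def check_files_deleted(current_filenames, old_filenames):
        deleted = set(old_filenames) - set(current_filenames)
        for filename in deleted:
            # A vanished file means previously loaded events may now be stale.
            logger.warning("File %s has been deleted.", filename)

    check_files_deleted(["events.2"], ["events.1", "events.2"])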
Example #6
    def get_single_train_task(self, plugin_name, train_id):
        """
        Get a single train task.

        Args:
            plugin_name (str): Plugin name; refer to `PluginNameEnum`.
            train_id (str): Specify a training job to query.

        Returns:
            {'train_jobs': list[TrainJob]}, refer to the RESTful API.
        """
        Validation.check_param_empty(plugin_name=plugin_name,
                                     train_id=train_id)
        Validation.check_plugin_name(plugin_name=plugin_name)
        train_job = self._data_manager.get_train_job_by_plugin(
            train_id=train_id, plugin_name=plugin_name)
        if train_job is None:
            raise exceptions.SummaryLogPathInvalid()
        return dict(train_jobs=[train_job])
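Wrapping the single job in the same {'train_jobs': [...]} envelope that a list endpoint would return keeps the RESTful response shape uniform for one job or many. A toy illustration (the TrainJob dict contents are invented):

    train_job = {"train_id": "./run_1", "tags": ["graph"]}
    response = dict(train_jobs=[train_job])
    print(response)  # {'train_jobs': [{'train_id': './run_1', 'tags': ['graph']}]}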
Example #7
    def load(self, computing_resource_mgr):
        """Load the data when loader is exist.

        Args:
            computing_resource_mgr (ComputingResourceManager): The ComputingResourceManager instance.
        """

        if self._loader is None:
            ms_dataloader = MSDataLoader(self._summary_dir)
            loaders = [ms_dataloader]
            for loader in loaders:
                if loader.filter_valid_files():
                    self._loader = loader
                    break

            if self._loader is None:
                logger.warning(
                    "No valid files can be loaded, summary_dir: %s.",
                    self._summary_dir)
                raise exceptions.SummaryLogPathInvalid()

        self._loader.load(computing_resource_mgr)
Example #8
    def load(self, computing_resource_mgr):
        """
        Load all valid log files.

        When a file is reloaded, loading continues from where it left off.

        Args:
            computing_resource_mgr (ComputingResourceManager): The ComputingResourceManager instance.
        """
        logger.debug("Start to load data in ms data loader.")
        filenames = self.filter_valid_files()
        if not filenames:
            logger.warning("No valid files can be loaded, summary_dir: %s.",
                           self._summary_dir)
            raise exceptions.SummaryLogPathInvalid()
        old_filenames = list(self._valid_filenames)
        self._valid_filenames = filenames
        self._check_files_deleted(filenames, old_filenames)

        with computing_resource_mgr.get_executor() as executor:
            for parser in self._parser_list:
                parser.parse_files(executor,
                                   filenames,
                                   events_data=self._events_data)
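Example #8 consumes the executor through a with block, which implies ComputingResourceManager.get_executor yields a context manager that releases workers on exit. A minimal sketch of that shape built on the standard library (this class is an assumption for illustration, not MindInsight's implementation):

    from concurrent.futures import ThreadPoolExecutor
    from contextlib import contextmanager

    class ComputingResourceManagerSketch:
        def __init__(self, max_workers=4):
            self._max_workers = max_workers

        @contextmanager
        def get_executor(self):
            executor = ThreadPoolExecutor(max_workers=self._max_workers)
            try:
                yield executor  # parsers submit parse jobs while the block is open
            finally:
                executor.shutdown(wait=True)  # reclaim workers on exit

    mgr = ComputingResourceManagerSketch()
    with mgr.get_executor() as executor:
        print(executor.submit(sum, [1, 2, 3]).result())  # 6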