# Example 1
    def list_explain_directories(self, summary_base_dir, offset=0, limit=None):
        """
        List explain directories within base directory.

        Args:
            summary_base_dir (str): Path of summary base directory.
            offset (int): An offset for page. Ex, offset is 0, mean current page is 1. Default value is 0.
            limit (int): The max data items for per page. Default value is None, which disables
                pagination and returns all directories.

        Returns:
            tuple[total, directories], total indicates the overall number of explain directories and directories
                    indicate list of summary directory info including the following attributes.
                - relative_path (str): Relative path of summary directory, referring to settings.SUMMARY_BASE_DIR,
                                        starting with "./".
                - create_time (datetime): Creation time of summary file.
                - update_time (datetime): Modification time of summary file.

        Raises:
            ParamValueError, if offset < 0 or limit is out of valid value range.
            ParamTypeError, if offset or limit is not valid integer.

        Examples:
            >>> from mindinsight.datavisual.data_transform.summary_watcher import SummaryWatcher
            >>> summary_watcher = SummaryWatcher()
            >>> total, directories = summary_watcher.list_explain_directories('/summary/base/dir', offset=0, limit=10)
        """
        offset = Validation.check_offset(offset=offset)
        limit = Validation.check_limit(limit, min_value=1, max_value=999, default_value=None)

        directories = self.list_summary_directories(summary_base_dir, overall=False, list_explain=True)
        if limit is None:
            # No limit supplied: return the complete listing without pagination.
            return len(directories), directories

        # offset is a page index and limit a page size, so slice out the requested page.
        return len(directories), directories[offset * limit:(offset + 1) * limit]
# Example 2
def query_explain_jobs():
    """
    Query explain jobs.

    Returns:
        Response, contains dict that stores base directory, total number of jobs and their detailed job metadata.

    Raises:
        ParamMissError: If train_id info is not in the request.
        ParamTypeError: If one of (offset, limit) is not integer in the request.
        ParamValueError: If one of (offset, limit) does not have the expected value in the request.
    """
    # Read pagination parameters from the query string and validate them.
    offset = Validation.check_offset(offset=request.args.get("offset", default=0))
    limit = Validation.check_limit(
        request.args.get("limit", default=10),
        min_value=1,
        max_value=SummaryWatcher.MAX_SUMMARY_DIR_COUNT)

    encapsulator = ExplainJobEncap(EXPLAIN_MANAGER)
    total, jobs = encapsulator.query_explain_jobs(offset, limit)

    payload = {
        'name': os.path.basename(os.path.realpath(settings.SUMMARY_BASE_DIR)),
        'total': total,
        'explain_jobs': jobs,
    }
    return jsonify(payload)
# Example 3
def _get_query_sample_parameters(data):
    """
    Get parameter for query.

    Args:
        data (dict): Dict that contains request info.

    Returns:
        dict, key-value pairs to call backend query functions.

    Raises:
        ParamMissError: If train_id info is not in the request.
        ParamTypeError: If certain key is not in the expected type in the request.
        ParamValueError: If certain key does not have the expected value in the request.
    """

    train_id = data.get("train_id")
    if train_id is None:
        raise ParamMissError('train_id')

    labels = data.get("labels")
    if labels is not None:
        _validate_type(labels, "labels", list)
    if labels:
        for item in labels:
            _validate_type(item, "element of labels", str)

    limit = data.get("limit", 10)
    limit = Validation.check_limit(limit, min_value=1, max_value=100)
    offset = data.get("offset", 0)
    offset = Validation.check_offset(offset=offset)
    sorted_name = data.get("sorted_name", "")
    _validate_value(sorted_name, "sorted_name",
                    ('', 'confidence', 'uncertainty'))

    sorted_type = data.get("sorted_type", "descending")
    _validate_value(sorted_type, "sorted_type", ("ascending", "descending"))

    prediction_types = data.get("prediction_types")
    if prediction_types is not None:
        # Fix: the error label previously said "element of labels" for this
        # type check, which would produce a misleading error message.
        _validate_type(prediction_types, "prediction_types", list)
    if prediction_types:
        for item in prediction_types:
            _validate_value(item, "element of prediction_types",
                            ('TP', 'FN', 'FP'))

    query_kwarg = {
        "train_id": train_id,
        "labels": labels,
        "limit": limit,
        "offset": offset,
        "sorted_name": sorted_name,
        "sorted_type": sorted_type,
        "prediction_types": prediction_types
    }
    return query_kwarg
    def search_node_names(self, search_content, offset, limit):
        """
        Search node names by search content.

        Args:
            search_content (Any): This content can be the key content of the node to search.
            offset (int): An offset for page. Ex, offset is 0, mean current page is 1.
            limit (int): The max data items for per page.

        Returns:
            TypedDict('Names', {'names': list[str]}), {"names": ["node_names"]}.
        """
        checked_offset = Validation.check_offset(offset=offset)
        checked_limit = Validation.check_limit(limit, min_value=1, max_value=1000)
        # Delegate the actual lookup (and pagination) to the underlying graph.
        matched_names = self._graph.search_node_names(
            search_content, checked_offset, checked_limit)
        return {"names": matched_names}
def query_train_jobs():
    """Query train jobs."""
    # Raw query-string values; validation coerces and range-checks them.
    raw_offset = request.args.get("offset", default=0)
    raw_limit = request.args.get("limit", default=10)

    offset = Validation.check_offset(offset=raw_offset)
    limit = Validation.check_limit(
        raw_limit, min_value=1, max_value=SummaryWatcher.MAX_SUMMARY_DIR_COUNT)

    manager = TrainTaskManager(DATA_MANAGER)
    total, train_jobs = manager.query_train_jobs(offset, limit)

    response_body = {
        'name': os.path.basename(os.path.realpath(settings.SUMMARY_BASE_DIR)),
        'total': total,
        'train_jobs': train_jobs,
    }
    return jsonify(response_body)
# Example 6
def query_explain_jobs():
    """Query explain jobs."""
    # Validate pagination parameters taken from the request query string.
    offset = Validation.check_offset(offset=request.args.get("offset", default=0))
    limit = Validation.check_limit(
        request.args.get("limit", default=10),
        min_value=1,
        max_value=SummaryWatcher.MAX_SUMMARY_DIR_COUNT)

    total, jobs = ExplainJobEncap(EXPLAIN_MANAGER).query_explain_jobs(offset, limit)

    base_dir_name = os.path.basename(os.path.realpath(settings.SUMMARY_BASE_DIR))
    return jsonify({
        'name': base_dir_name,
        'total': total,
        'explain_jobs': jobs,
    })
# Example 7
    def search_node_names(self, search_content, offset, limit):
        """
        Search node names by search content.

        Args:
            search_content (Any): This content can be the key content of the node to search.
            offset (int): An offset for page. Ex, offset is 0, mean current page is 1.
            limit (int): The max data items for per page.

        Returns:
            Dict, the searched nodes.
        """
        page = Validation.check_offset(offset=offset)
        page_size = Validation.check_limit(limit, min_value=1, max_value=1000)
        # Collect all matching node names, then materialize only the
        # requested page of node details.
        matched = self._graph.search_nodes_by_pattern(search_content)
        start = page * page_size
        page_nodes = self._graph.get_nodes(matched[start:start + page_size])

        return {"nodes": page_nodes}
# Example 8
def query_saliency():
    """
    Query saliency map related results.

    Returns:
        Response, contains the sample count and the saliency-map samples.

    Raises:
        ParamMissError: If train_id is not in the request body.
        ParamValueError: If sorted_name, sorted_type, offset or limit does not
            have an expected value in the request.
    """

    data = _read_post_request(request)

    train_id = data.get("train_id")
    if train_id is None:
        raise ParamMissError('train_id')

    labels = data.get("labels")
    explainers = data.get("explainers")
    limit = data.get("limit", 10)
    limit = Validation.check_limit(limit, min_value=1, max_value=100)
    offset = data.get("offset", 0)
    offset = Validation.check_offset(offset=offset)
    sorted_name = data.get("sorted_name", "")
    sorted_type = data.get("sorted_type", "descending")

    if sorted_name not in ("", "confidence", "uncertainty"):
        raise ParamValueError(
            f"sorted_name: {sorted_name}, valid options: '' 'confidence' 'uncertainty'"
        )
    if sorted_type not in ("ascending", "descending"):
        # Fix: this message previously listed sorted_name's options
        # ('confidence' 'uncertainty') instead of the sorted_type options.
        raise ParamValueError(
            f"sorted_type: {sorted_type}, valid options: 'ascending' 'descending'"
        )

    encapsulator = SaliencyEncap(_image_url_formatter, EXPLAIN_MANAGER)
    count, samples = encapsulator.query_saliency_maps(train_id=train_id,
                                                      labels=labels,
                                                      explainers=explainers,
                                                      limit=limit,
                                                      offset=offset,
                                                      sorted_name=sorted_name,
                                                      sorted_type=sorted_type)

    return jsonify({"count": count, "samples": samples})