Example #1
    def SendMultiGraphs(self, request_iterator, context):
        """Send graph into DebuggerCache."""
        log.info("Received multi_graphs.")
        reply = get_ack_reply()
        if self._status == ServerStatus.MISMATCH:
            log.info(
                "Mindspore and Mindinsight is unmatched, waiting for user to terminate the service."
            )
            return reply
        serial_graph = b""
        graph_dict = {}
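        # Each sub-graph arrives as a run of chunks; a chunk with `finished`
        # set marks the end of one serialized GraphProto.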
        for chunk in request_iterator:
            serial_graph += chunk.buffer
            if chunk.finished:
                sub_graph = GraphProto.FromString(serial_graph)
                graph_dict[sub_graph.name] = sub_graph
                log.debug("Deserialize the graph %s. Receive %s nodes",
                          sub_graph.name, len(sub_graph.node))
                serial_graph = b""
                self._cache_store.get_stream_handler(
                    Streams.TENSOR).put_const_vals(sub_graph.const_vals)

        self._cache_store.get_stream_handler(Streams.GRAPH).put(graph_dict)
        self._record_parameter_names()
        self._status = ServerStatus.RECEIVE_GRAPH
        log.debug("Send the reply for graph.")
        return reply
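The method above reassembles one GraphProto per `finished` chunk, so a client has to split each serialized graph into pieces and flag the last piece of every graph. Below is a minimal client-side sketch of that chunking; the stand-in Chunk type, its `buffer`/`finished` fields, and the `chunk_size` value are assumptions that mirror how the servicer consumes the stream, not the actual MindInsight client code.

from collections import namedtuple

# Stand-in for the real chunk proto message (illustrative assumption).
Chunk = namedtuple("Chunk", ["buffer", "finished"])

def chunk_serialized_graphs(serialized_graphs, chunk_size=64 * 1024):
    """Yield stand-in chunks; the last chunk of each graph has finished=True."""
    for serial_graph in serialized_graphs:
        total = len(serial_graph)
        for start in range(0, total, chunk_size):
            piece = serial_graph[start:start + chunk_size]
            yield Chunk(buffer=piece, finished=start + chunk_size >= total)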
Example #2
    def SendGraph(self, request_iterator, context):
        """Send graph into DebuggerCache."""
        log.info("Received graph.")
        serial_graph = b""
        for chunk in request_iterator:
            serial_graph += chunk.buffer
        graph = GraphProto.FromString(serial_graph)
        log.debug("Deserialize the graph. Receive %s nodes", len(graph.node))
        self._cache_store.get_stream_handler(Streams.GRAPH).put(graph)
        self._cache_store.get_stream_handler(Streams.TENSOR).put_const_vals(graph.const_vals)
        self._status = ServerStatus.RECEIVE_GRAPH
        reply = get_ack_reply()
        log.info("Send the reply for graph.")
        return reply
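SendGraph, by contrast, treats the whole stream as a single serialized graph and only parses it after the iterator is exhausted. A rough self-check of that reassembly pattern, using plain byte strings in place of real chunk messages (an illustrative assumption, not the MindInsight proto types):

def reassemble(buffers):
    """Concatenate streamed buffers the way SendGraph does before parsing."""
    serial_graph = b""
    for buffer in buffers:
        serial_graph += buffer
    return serial_graph

payload = b"serialized-graph-bytes"
pieces = [payload[i:i + 8] for i in range(0, len(payload), 8)]
assert reassemble(pieces) == payload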