async def send_ssh_job_info(self, job_id: BackendJobId, host: str, port: int, key: str):
    """
    Forward the SSH debug connection parameters of *job_id* to the backend (and,
    through it, to the client). Must be called at most once per job.

    :exception JobNotRunningException: the job is no longer running
        (send_job_result was already called)
    :exception TooManyCallsException: SSH info was already sent for this job
    """
    _missing = object()
    already_sent = self.__running_job.get(job_id, _missing)
    if already_sent is _missing:
        raise JobNotRunningException()
    if already_sent:
        raise TooManyCallsException()
    # Record that the SSH info was sent, enforcing the at-most-once contract.
    self.__running_job[job_id] = True
    await ZMQUtils.send(self.__backend_socket, AgentJobSSHDebug(job_id, host, port, key))
async def handle_running_container(self, job_id, container_id,
                                   inputdata, debug, ssh_port,
                                   orig_env, orig_memory_limit, orig_time_limit, orig_hard_time_limit,
                                   sockets_path, student_path, systemfiles_path,
                                   future_results):
    """
    Talk with a running container: attach to its stdio, send the initial input,
    then read and dispatch the length-prefixed msgpack messages it emits until
    it returns its result (or dies).

    Messages understood (field "type"):
    - "run_student": spawn a student container (limits capped by the original ones)
    - "ssh_key":     forward SSH debug connection info to the backend
    - "result":      final result of the container; completes *future_results*

    :param future_results: asyncio Future; set to the container's result dict,
        or to ``None`` on error / EOF without a result.
    """
    sock = await self._loop.run_in_executor(None, lambda: self._docker.attach_to_container(container_id))
    try:
        read_stream, write_stream = await asyncio.open_connection(sock=sock.get_socket())
    except Exception:  # fixed: was a bare except, which also swallowed CancelledError
        self._logger.exception("Exception occurred while creating read/write stream to container")
        return None

    # Send hello msg
    await self._write_to_container_stdin(write_stream, {"type": "start", "input": inputdata, "debug": debug})

    buffer = bytearray()
    try:
        while not read_stream.at_eof():
            msg_header = await read_stream.readexactly(8)
            # Frame format imposed by docker in the attach endpoint:
            # 1 byte stream type, 3 padding bytes, 4-byte big-endian payload length.
            outtype, length = struct.unpack_from('>BxxxL', msg_header)
            if length != 0:
                content = await read_stream.readexactly(length)

                if outtype == 1:  # stdout: carries the msgpack message stream
                    buffer += content

                if outtype == 2:  # stderr: only logged, never parsed
                    self._logger.debug("Received stderr from containers:\n%s", content)

                # The first 4 bytes are the length of the message (native byte
                # order). Process every complete message currently buffered.
                while len(buffer) > 4:
                    # fixed: length was unpacked three times per message; compute it once
                    msg_length = struct.unpack('I', buffer[0:4])[0]
                    if len(buffer) < 4 + msg_length:
                        break  # message not fully received yet
                    msg_encoded = bytes(buffer[4:4 + msg_length])  # ... get it
                    buffer = buffer[4 + msg_length:]  # ... withdraw it from the buffer
                    try:
                        # NOTE(review): encoding="utf8" is deprecated/removed in recent
                        # msgpack releases (raw=False is the replacement) -- kept as-is
                        # for compatibility with the version pinned by the project.
                        msg = msgpack.unpackb(msg_encoded, encoding="utf8", use_list=False)
                        self._logger.debug("Received msg %s from container %s", msg["type"], container_id)
                        if msg["type"] == "run_student":
                            # start a new student container; requested limits are
                            # capped by the original (grading container) limits
                            environment = msg["environment"] or orig_env
                            memory_limit = min(msg["memory_limit"] or orig_memory_limit, orig_memory_limit)
                            time_limit = min(msg["time_limit"] or orig_time_limit, orig_time_limit)
                            hard_time_limit = min(msg["hard_time_limit"] or orig_hard_time_limit, orig_hard_time_limit)
                            share_network = msg["share_network"]
                            socket_id = msg["socket_id"]
                            assert "/" not in socket_id  # ensure task creator do not try to break the agent :-(
                            self._loop.create_task(self.create_student_container(
                                job_id, container_id, sockets_path, student_path, systemfiles_path,
                                socket_id, environment, memory_limit, time_limit, hard_time_limit,
                                share_network, write_stream))
                        elif msg["type"] == "ssh_key":
                            # send the data to the backend (and client)
                            self._logger.info("%s %s", self.running_ssh_debug[container_id], str(msg))
                            await ZMQUtils.send(self._backend_socket,
                                                AgentJobSSHDebug(job_id, self.ssh_host, ssh_port, msg["ssh_key"]))
                        elif msg["type"] == "result":
                            # last message containing the results of the container
                            future_results.set_result(msg["result"])
                            write_stream.close()
                            sock.close_socket()
                            return  # this is the last message
                    except Exception:  # fixed: was a bare except, which also swallowed CancelledError
                        self._logger.exception("Received incorrect message from container %s (job id %s)",
                                               container_id, job_id)
                        future_results.set_result(None)
                        write_stream.close()
                        sock.close_socket()
                        return
    except asyncio.IncompleteReadError:
        self._logger.debug("Container output ended with an IncompleteReadError; It was probably killed.")
    except Exception:  # fixed: was a bare except, which also swallowed CancelledError
        self._logger.exception("Exception while reading container %s output", container_id)

    # EOF without result :-(
    self._logger.warning("Container %s has not given any result", container_id)
    write_stream.close()
    sock.close_socket()
    future_results.set_result(None)