def _send_cache_wl_requester_response(self, subject, dst, requesters, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_WL_REQUESTER_RESP, 0,
                requesters, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_insert_response(self, subject, dst, node_id, item, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_INSERT_RESP, 0,
                dict(node=node_id, item=item),
                trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_retrieve_response(self, subject, dst, node_id, delay, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_RETRIEVE_RESP, 0,
                dict(node=node_id, delay=delay),
                trace + [(self._id, self._env.now - subject.timestamp)]))

def _listen_incoming_tasks(self):
    from cache.base import CacheItem
    # SimPy process: consume incoming task packets, run each task, then return
    # the result to the sender as a TASK_EXEC_RESP carrying a CacheItem.
    while self.is_alive:
        pkt = yield self._data.get()
        msg, task = pkt.msg, pkt.msg.data
        yield self._env.process(self.run_task(task))
        # Log when the elapsed time noticeably exceeds the task's nominal runtime.
        if (self._env.now - msg.timestamp) > task.exec_time * 1.1:
            self.logger.info('Runtime: %f, time elapsed: %f' % (task.exec_time, self._env.now - msg.timestamp))
        self.send(msg.src,
                  Message(self._env.now, msg.subject, self._id, msg.src, MessageType.TASK_EXEC_RESP,
                          task.output.size, CacheItem(task.id, task),
                          msg.trace + [(self._id, self._env.now - msg.subject.timestamp)]))

def _send_request(self):
    # SimPy process: submit every task through the first neighbor (gateway),
    # optionally spacing requests by Poisson-distributed intervals.
    if len(self._neighbors) < 1:
        raise Exception('{} is not connected to any other node'.format(self._id))
    gateway = next(iter(self._neighbors.keys()))
    for i, t in enumerate(self._tasks):
        timestamp = self._env.now
        subject = Subject(timestamp, self._id, self._dest, t)
        msg = Message(timestamp, subject, self._id, self._dest, MessageType.TASK_EXEC_REQ,
                      t.input.size, t, [(self._id, 0)])
        self._pending_requests[subject] = msg
        self.send(gateway, msg)
        # self.logger.info('Client {}: sent {} requests'.format(self._id, i))
        if self._req_int_lam:
            req_int = self._random_state.poisson(self._req_int_lam)
            self.logger.debug('Request interval: {}'.format(req_int))
            yield self._env.timeout(req_int)
    self._is_finished = True
    self.logger.info('Client {} finishes submitting tasks'.format(self._id))

def _send_cache_metadata_request(self, subject, key, trace):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_METADATA_REQ, 0,
                key, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_remove_request(self, key):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, None, self._id, dst, MessageType.CACHE_REMOVE_REQ, 0,
                dict(node=self._id, key=key), [self._id]))

def _send_master_insert_request(self, subject, dst, item, trace):
    self.send(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.MASTER_INSERT_REQ, item.size,
                item, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_insert_ack(self, subject, item, status, trace):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_INSERT_ACK, 0,
                dict(item=item, node=self._id, status=status),
                trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_insert_request(self, subject, item, trace):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_INSERT_REQ, 0,
                item, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_task_exec_response(self, subject, dst, size, item, trace):
    self.send(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.TASK_EXEC_RESP, size,
                item, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_transfer_response(self, subject, dst, size, item, trace):
    return self.send(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_TRANSFER_RESP, size,
                item, trace + [(self._id, self._env.now - subject.timestamp)]),
        subject.data.exec_time)

def _send_cache_wl_add_response(self, subject, dst, lr, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_WL_ADD_RESP, 0,
                lr, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_metadata_response(self, subject, dst, meta, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_METADATA_RESP, 0,
                meta, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_update_request(self, dst, item, trace):
    self.send_message(
        dst,
        Message(self._env.now, None, self._id, dst, MessageType.CACHE_UDPATE_REQ, 0,
                item, trace + [self._id]))

def _send_cache_wl_request_request(self, subject, key, trace):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_WL_REQUEST_REQ, 0,
                dict(key=key, node=self._id),
                trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_wl_clear_request(self, subject, key, trace):
    dst = self._network.master.id
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_WL_CLEAR_REQ, 0,
                key, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_transfer_declined_message(self, subject, dst, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_TRANSFER_DECLINE, 0,
                None, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_transfer_request(self, subject, dst, item, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_TRANSFER_REQ, 0,
                item, trace + [(self._id, self._env.now - subject.timestamp)]))

def _send_cache_redirect_message(self, subject, dst, trace):
    self.send_message(
        dst,
        Message(self._env.now, subject, self._id, dst, MessageType.CACHE_REDIRECT, 0,
                None, trace + [(self._id, self._env.now - subject.timestamp)]))