def get_all(self, limit=None, options: dict = None):
    if options is None:
        options = {}
    # limit, when given, must be a positive int
    if limit is not None and (not isinstance(limit, int) or limit <= 0):
        raise ValueError(f"limit:{limit} must be positive int")
    job_id = generate_job_id(self.__session_id, RollPair.GET_ALL)
    # carry the (serialized) limit to the processors via the functor body
    er_pair = ErPair(key=create_serdes(self.__store._store_locator._serdes).serialize(limit)
                     if limit is not None else None,
                     value=None)

    def send_command():
        job = ErJob(id=job_id,
                    name=RollPair.GET_ALL,
                    inputs=[self.__store],
                    outputs=[self.__store],
                    functors=[ErFunctor(name=RollPair.GET_ALL,
                                        body=cloudpickle.dumps(er_pair))])
        task_results = self._run_job(job=job)
        er_store = self.__get_output_from_result(task_results)
        return er_store

    send_command()
    populated_store = self.ctx.populate_processor(self.__store)
    transfer_pair = TransferPair(transfer_id=job_id)
    done_cnt = 0
    for k, v in transfer_pair.gather(populated_store):
        done_cnt += 1
        yield self.key_serdes.deserialize(k), self.value_serdes.deserialize(v)
    L.trace(f"get_all: namespace={self.get_namespace()} name={self.get_name()}, count={done_cnt}")
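# Usage sketch (illustrative only, not part of this class). Assuming a
# RollPairContext `ctx` with a `load(namespace, name)` method that returns a
# RollPair bound to an existing store (an assumption, not confirmed by this
# file), the variant above streams at most `limit` deserialized pairs:
#
#     rp = ctx.load(namespace="demo_ns", name="demo_table")
#     for k, v in rp.get_all(limit=10):   # generator: pairs arrive lazily via TransferPair.gather
#         print(k, v)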
def get_all(self, options: dict = None):
    if options is None:
        options = {}
    L.info('get all functor')
    job_id = generate_job_id(self.__session_id, RollPair.GET_ALL)

    def send_command():
        job = ErJob(id=job_id,
                    name=RollPair.GET_ALL,
                    inputs=[self.__store],
                    outputs=[self.__store],
                    functors=[])
        result = self.__command_client.simple_sync_send(
            input=job,
            output_type=ErJob,
            endpoint=self.ctx.get_roll()._command_endpoint,
            command_uri=CommandURI(f'{RollPair.ROLL_PAIR_URI_PREFIX}/{RollPair.RUN_JOB}'),
            serdes_type=SerdesTypes.PROTOBUF)
        return result

    send_command()
    populated_store = self.ctx.populate_processor(self.__store)
    transfer_pair = TransferPair(transfer_id=job_id)
    done_cnt = 0
    for k, v in transfer_pair.gather(populated_store):
        done_cnt += 1
        yield self.key_serdes.deserialize(k), self.value_serdes.deserialize(v)
    L.debug(f"get_all count:{done_cnt}")
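# ---------------------------------------------------------------------------
# Minimal end-to-end sketch (illustrative only): collect a whole table into a
# local dict. `ctx.load(namespace, name)` returning a RollPair is an assumed
# API; everything else mirrors the generator behaviour of get_all() above.
def _collect_all_example(ctx, namespace="demo_ns", name="demo_table"):
    rp = ctx.load(namespace=namespace, name=name)
    # get_all() yields already-deserialized (key, value) tuples; the trailing
    # count log only fires once the generator is fully consumed.
    return dict(rp.get_all())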