def _infer_model_as_iterable(self, checkpoint_path, predictions, feed_fn, return_dict):
  """Yields inference results one example at a time.

  Restores the model from `checkpoint_path`, repeatedly evaluates the
  `predictions` ops (feeding batches produced by `feed_fn`, if given), and
  unpacks every output batch into individual per-example results.

  Args:
    checkpoint_path: Path of the checkpoint to restore before inference.
    predictions: Dict mapping output name to tensor to evaluate.
    feed_fn: Callable returning a feed dict per batch, or None to run with
      no feeds. May raise StopIteration to signal end of input.
    return_dict: If True, yield one dict per example (one entry per key of
      `predictions`); otherwise yield only the 'predictions' output values.

  Yields:
    Per-example predictions: dicts if `return_dict` is True, else single
    values taken from the 'predictions' batch.
  """
  if feed_fn is None:
    feed_dicts = itertools.repeat(None)
  else:
    def _feed_fn():
      while True:
        # PEP 479 (Python 3.7+): a StopIteration escaping a generator is
        # converted to RuntimeError, so it would no longer end iteration
        # "naturally" as intended. Translate it into a plain return.
        try:
          feed_dict = feed_fn()
        except StopIteration:
          return
        yield feed_dict
    feed_dicts = _feed_fn()

  try:
    for output_batch in graph_actions.run_feeds_iter(
        output_dict=predictions,
        feed_dicts=feed_dicts,
        restore_checkpoint_path=checkpoint_path):
      # Unpack batches into individual predictions.
      if return_dict:
        # Assumes every output shares the same leading (batch) dimension;
        # use the first output to determine how many examples to unpack.
        batch_length = list(output_batch.values())[0].shape[0]
        for i in range(batch_length):
          yield {key: value[i] for key, value in output_batch.items()}
      else:
        for pred in output_batch['predictions']:
          yield pred
  except errors.OutOfRangeError:
    # We fall out of the above loop naturally if feed_fn raises StopIteration,
    # or we catch an OutOfRangeError if we've reached the end of inputs.
    logging.info('Reached end of inputs for predict_iter.')
def _infer_model_as_iterable(self, checkpoint_path, predictions, feed_fn, return_dict):
  """Yields inference results one example at a time.

  NOTE(review): this method appears to be defined twice in this file with
  identical bodies — confirm and remove one copy, as the later definition
  silently shadows the earlier one.

  Restores the model from `checkpoint_path`, repeatedly evaluates the
  `predictions` ops (feeding batches produced by `feed_fn`, if given), and
  unpacks every output batch into individual per-example results.

  Args:
    checkpoint_path: Path of the checkpoint to restore before inference.
    predictions: Dict mapping output name to tensor to evaluate.
    feed_fn: Callable returning a feed dict per batch, or None to run with
      no feeds. May raise StopIteration to signal end of input.
    return_dict: If True, yield one dict per example (one entry per key of
      `predictions`); otherwise yield only the 'predictions' output values.

  Yields:
    Per-example predictions: dicts if `return_dict` is True, else single
    values taken from the 'predictions' batch.
  """
  if feed_fn is None:
    feed_dicts = itertools.repeat(None)
  else:
    def _feed_fn():
      while True:
        # PEP 479 (Python 3.7+): a StopIteration escaping a generator is
        # converted to RuntimeError, so it would no longer end iteration
        # "naturally" as intended. Translate it into a plain return.
        try:
          feed_dict = feed_fn()
        except StopIteration:
          return
        yield feed_dict
    feed_dicts = _feed_fn()

  try:
    for output_batch in graph_actions.run_feeds_iter(
        output_dict=predictions,
        feed_dicts=feed_dicts,
        restore_checkpoint_path=checkpoint_path):
      # Unpack batches into individual predictions.
      if return_dict:
        # Assumes every output shares the same leading (batch) dimension;
        # use the first output to determine how many examples to unpack.
        batch_length = list(output_batch.values())[0].shape[0]
        for i in range(batch_length):
          yield {key: value[i] for key, value in output_batch.items()}
      else:
        for pred in output_batch['predictions']:
          yield pred
  except errors.OutOfRangeError:
    # We fall out of the above loop naturally if feed_fn raises StopIteration,
    # or we catch an OutOfRangeError if we've reached the end of inputs.
    logging.info('Reached end of inputs for predict_iter.')