from typing import Dict, Optional, Tuple

import tensorflow as tf
from tensorflow_examples.lite.model_maker.core.data_util import object_detector_dataloader


def evaluate_tflite(
    self, tflite_filepath: str,
    data: object_detector_dataloader.DataLoader) -> Dict[str, float]:
  """Evaluates the TFLite model.

  Args:
    tflite_filepath: File path to the exported TFLite model.
    data: Data to be evaluated.

  Returns:
    A dict mapping metric names to metric values.
  """
  # Use batch_size=1 so that no examples are dropped from the dataset.
  ds = data.gen_dataset(self.model_spec, batch_size=1, is_training=False)
  return self.model_spec.evaluate_tflite(tflite_filepath, ds, len(data),
                                         data.annotations_json_file)
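# Usage sketch (illustrative only; names such as `model`, `validation_data`,
# and the filename are assumptions, not part of this module): evaluating an
# exported TFLite model end to end.
#
#   model.export(export_dir='.', tflite_filename='model.tflite')
#   tflite_metrics = model.evaluate_tflite('model.tflite', validation_data)
#   print(tflite_metrics)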
def _get_dataset_and_steps(
    self,
    data: object_detector_dataloader.DataLoader,
    batch_size: int,
    is_training: bool,
) -> Tuple[Optional[tf.data.Dataset], int, Optional[str]]:
  """Gets the dataset, the number of steps, and the annotations JSON file."""
  if not data:
    return None, 0, None
  # TODO(b/171449557): Put this into DataLoader.
  dataset = data.gen_dataset(
      self.model_spec, batch_size, is_training=is_training)
  # Floor division: any final partial batch is dropped during training.
  steps = len(data) // batch_size
  return dataset, steps, data.annotations_json_file
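# Usage sketch (illustrative; `train_data` and `validation_data` are assumed
# DataLoader instances): this helper is how a training loop would derive
# per-epoch step counts and the COCO annotations file for evaluation.
#
#   train_ds, steps_per_epoch, _ = self._get_dataset_and_steps(
#       train_data, batch_size, is_training=True)
#   validation_ds, validation_steps, val_json_file = self._get_dataset_and_steps(
#       validation_data, batch_size, is_training=False)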
def evaluate(self,
             data: object_detector_dataloader.DataLoader,
             batch_size: Optional[int] = None) -> Dict[str, float]:
  """Evaluates the model.

  Args:
    data: Data to be evaluated.
    batch_size: Number of samples per evaluation step. If None, the batch
      size from the model spec is used.

  Returns:
    A dict mapping metric names to metric values.
  """
  batch_size = batch_size if batch_size else self.model_spec.batch_size
  # Don't drop the final smaller batch, so the whole dataset is evaluated.
  self.model_spec.config.drop_remainder = False
  ds = data.gen_dataset(self.model_spec, batch_size, is_training=False)
  # Ceiling division: a final partial batch still counts as a step.
  steps = (len(data) + batch_size - 1) // batch_size
  # TODO(b/171449557): Upstream this to the parent class.
  if steps <= 0:
    raise ValueError(
        'The size of the validation_data (%d) must not be smaller than '
        'batch_size (%d). To solve this problem, set the batch_size smaller '
        'or increase the size of the validation_data.' %
        (len(data), batch_size))
  eval_metrics = self.model_spec.evaluate(self.model, ds, steps,
                                          data.annotations_json_file)
  # Set drop_remainder back to True since it must be True during training;
  # otherwise training will fail.
  self.model_spec.config.drop_remainder = True
  return eval_metrics
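# Usage sketch (illustrative; `model` and `validation_data` are assumed to be
# a trained detector and a DataLoader): the returned dict maps metric names to
# values, typically COCO-style AP metrics for the EfficientDet model specs.
#
#   metrics = model.evaluate(validation_data, batch_size=8)
#   print(metrics)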