def Run(self, args):
  """Runs a local prediction and returns the raw prediction results.

  If the user did not request an explicit output format, a default format is
  derived from the shape of the response before returning.

  Args:
    args: argparse namespace with model_dir, json_instances, text_instances
      and the standard --format flag.

  Returns:
    The prediction results as returned by local_utils.RunPredict.
  """
  results = local_utils.RunPredict(
      args.model_dir, args.json_instances, args.text_instances)
  if not args.IsSpecified('format'):
    # The response is either a bare list of predictions or a dict wrapping
    # them under 'predictions'; normalize before choosing a format.
    predictions = results if isinstance(results, list) else results.get(
        'predictions')
    args.format = predict_utilities.GetDefaultFormat(predictions)
  return results
def Run(self, args):
  """Runs a local prediction using the user-selected ML framework.

  Resolves the --framework choice to its enum name (defaulting to
  TensorFlow when unset), runs the prediction, and — when no explicit
  output format was requested — derives a default format from the shape of
  the response.

  Args:
    args: argparse namespace with model_dir, json_instances, text_instances,
      framework and the standard --format flag.

  Returns:
    The prediction results as returned by local_utils.RunPredict.
  """
  framework_enum = flags.FRAMEWORK_MAPPER.GetEnumForChoice(args.framework)
  # No framework chosen on the command line: fall back to TensorFlow.
  if framework_enum:
    framework_name = framework_enum.name.lower()
  else:
    framework_name = 'tensorflow'
  results = local_utils.RunPredict(
      args.model_dir, args.json_instances, args.text_instances,
      framework=framework_name)
  if not args.IsSpecified('format'):
    # The response is either a bare list of predictions or a dict wrapping
    # them under 'predictions'; normalize before choosing a format.
    predictions = results if isinstance(results, list) else results.get(
        'predictions')
    args.format = predict_utilities.GetDefaultFormat(predictions)
  return results
def Run(self, args):
  """Runs a local prediction, honoring framework and signature flags.

  Resolves the --framework choice (defaulting to TensorFlow), warns when no
  --signature-name was supplied, runs the prediction, and — when no explicit
  output format was requested — derives a default format from the shape of
  the response.

  Args:
    args: argparse namespace with model_dir, json_instances, text_instances,
      framework, signature_name and the standard --format flag.

  Returns:
    The prediction results as returned by local_utils.RunPredict.
  """
  framework_enum = flags.FRAMEWORK_MAPPER.GetEnumForChoice(args.framework)
  # No framework chosen on the command line: fall back to TensorFlow.
  if framework_enum:
    framework_name = framework_enum.name.lower()
  else:
    framework_name = 'tensorflow'
  if args.signature_name is None:
    # Best-effort heads-up only; prediction proceeds with the default
    # signature and may fail if the model uses a non-default one.
    log.status.Print(
        'If the signature defined in the model is '
        'not serving_default then you must specify it via '
        '--signature-name flag, otherwise the command may fail.')
  results = local_utils.RunPredict(
      args.model_dir,
      json_instances=args.json_instances,
      text_instances=args.text_instances,
      framework=framework_name,
      signature_name=args.signature_name)
  if not args.IsSpecified('format'):
    # The response is either a bare list of predictions or a dict wrapping
    # them under 'predictions'; normalize before choosing a format.
    predictions = results if isinstance(results, list) else results.get(
        'predictions')
    args.format = predict_utilities.GetDefaultFormat(predictions)
  return results