def doCompute():
    """Acquisition loop: read traces from `xa`, normalize, predict porosity via a
    SageMaker endpoint, and log each (sample: prediction) pair to a text file.

    Side effects: writes/overwrites the log file, passes the input through to
    `xa.Output` unchanged, and never returns (the acquisition loop is endless).
    """
    endpoint_name = "endpoint-from-saved-model"
    predictor = TensorFlowPredictor(endpoint_name)
    # Context manager replaces the original unreachable close() call after the
    # infinite loop: the file is now closed even on an abnormal exit.
    with open(
        r"C:\Users\Polash-Dell\coder_guy\PycharmProjects\aws_dl\1_aws_dl.txt", 'w+'
    ) as myfile:
        myfile.write("Norm Amp : Porosity")
        while True:
            xa.doInput()
            x_test_list = xa.Input['Input'][0, 0, :].tolist()
            x_test = pd.DataFrame(x_test_list)
            stats = x_test.describe().transpose()
            # Z-score normalization; assumes std is non-zero — TODO confirm.
            x_test = (x_test - stats['mean']) / stats['std']
            for a in x_test.values:
                myfile.write(str(a) + ": ")
                try:
                    test_predictions = predictor.predict(a)
                    myfile.write(str(test_predictions['predictions'][0][0]) + "\n")
                except Exception as e:
                    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                    # can still stop the loop; log the message (not just the class,
                    # which is all sys.exc_info()[0] provided) and keep streaming.
                    myfile.write("\nError:" + str(e))
            # Pass the input through unchanged so downstream consumers keep running.
            xa.Output['Output'] = xa.Input['Input']
            xa.doOutput()
def doCompute():
    """Acquisition loop: read traces from `xa`, z-score normalize them, run batch
    inference against the saved-model endpoint, and emit the predictions.

    Never returns; each iteration consumes one input block and produces one
    output block.
    """
    predictor = TensorFlowPredictor("endpoint-from-saved-model")
    while True:
        xa.doInput()
        samples = xa.Input['Input'][0, 0, :].tolist()
        frame = pd.DataFrame(samples)
        summary = frame.describe().transpose()
        # Z-score normalization using the per-column mean/std of this block.
        normalized = (frame - summary['mean']) / summary['std']
        response = predictor.predict(np.array(normalized))
        xa.Output['Output'] = np.array(response['predictions'])
        xa.doOutput()
def test_predictor_csv(sagemaker_session):
    """With a CSV serializer, rows are flattened to comma-separated lines and
    sent as text/csv; the JSON response body is returned decoded."""
    predictor = TensorFlowPredictor(
        "endpoint", sagemaker_session, serializer=CSVSerializer()
    )
    mock_response(json.dumps(PREDICT_RESPONSE).encode("utf-8"), sagemaker_session)

    result = predictor.predict([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])

    assert_invoked(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType=CSV_CONTENT_TYPE,
        Accept=JSON_CONTENT_TYPE,
        Body="1.0,2.0,3.0\n4.0,5.0,6.0",
    )
    assert result == PREDICT_RESPONSE
def test_predictor(sagemaker_session):
    """Default serialization: the input is JSON-encoded and sent as
    application/json; the decoded JSON response is returned as-is."""
    predictor = TensorFlowPredictor("endpoint", sagemaker_session)
    mock_response(json.dumps(PREDICT_RESPONSE).encode("utf-8"), sagemaker_session)

    result = predictor.predict(PREDICT_INPUT)

    assert_invoked(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType=JSON_CONTENT_TYPE,
        Accept=JSON_CONTENT_TYPE,
        Body=json.dumps(PREDICT_INPUT),
    )
    assert result == PREDICT_RESPONSE
def test_predictor_classify(sagemaker_session):
    """classify() routes through the TFS classify API by tagging the request
    with the `tfs-method=classify` custom attribute."""
    predictor = TensorFlowPredictor("endpoint", sagemaker_session)
    mock_response(json.dumps(CLASSIFY_RESPONSE).encode("utf-8"), sagemaker_session)

    result = predictor.classify(CLASSIFY_INPUT)

    assert_invoked_with_body_dict(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType=JSON_CONTENT_TYPE,
        Accept=JSON_CONTENT_TYPE,
        CustomAttributes="tfs-method=classify",
        Body=json.dumps(CLASSIFY_INPUT),
    )
    assert result == CLASSIFY_RESPONSE
def home():
    """Flask handler: read an image array from the `image` query parameter,
    invoke the `xgboost-001` endpoint with a CSV payload, and return the
    arg-max class as a human-readable string.

    Fixes: removed an unused `TensorFlowPredictor('xgboost-001')` instance
    (never referenced) and deleted the commented-out invoke_endpoint call.
    """

    def np2csv(arr):
        # Serialize a numpy array to one CSV string (trailing newline stripped).
        csv = io.BytesIO()
        numpy.savetxt(csv, arr, delimiter=',', fmt='%g')
        return csv.getvalue().decode().rstrip()

    mynumber = request.args.getlist('image')
    # literal_eval only parses Python literals, so untrusted query input is safe here.
    mynumberarray = ast.literal_eval(mynumber[0])
    payload = np2csv(mynumberarray)

    runtime = boto3.Session().client('runtime.sagemaker')
    response = runtime.invoke_endpoint(EndpointName='xgboost-001',
                                       ContentType='text/csv',
                                       Body=payload)
    result = response['Body'].read().decode('ascii')
    floatArr = numpy.array(json.loads(result))
    predictedLabel = numpy.argmax(floatArr)
    return "Most likely answer: {}".format(predictedLabel)
def tensor():
    """Flask handler: parse an image array from the `image` query parameter,
    send it to the TensorFlow endpoint, and return the top class label."""
    raw = request.args.getlist('image')
    predictor = TensorFlowPredictor(
        'sagemaker-tensorflow-2018-04-15-02-09-49-209')
    image = ast.literal_eval(raw[0])
    response = predictor.predict(image)
    # TFS classify-style response: first int64 class value is the prediction.
    prediction = response['outputs']['classes']['int64Val'][0]
    return "Most likely answer: {}".format(prediction)
def test_predictor_regress(sagemaker_session):
    """regress() with an explicit model name/version encodes all three routing
    hints into the CustomAttributes header."""
    predictor = TensorFlowPredictor(
        "endpoint", sagemaker_session, model_name="model", model_version="123"
    )
    mock_response(json.dumps(REGRESS_RESPONSE).encode("utf-8"), sagemaker_session)

    result = predictor.regress(REGRESS_INPUT)

    assert_invoked_with_body_dict(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType=JSON_CONTENT_TYPE,
        Accept=JSON_CONTENT_TYPE,
        CustomAttributes="tfs-method=regress,tfs-model-name=model,tfs-model-version=123",
        Body=json.dumps(REGRESS_INPUT),
    )
    assert result == REGRESS_RESPONSE
def test_predictor_jsons(sagemaker_session):
    """An identity serializer with a custom content type must pass the payload
    through untouched and advertise that content type."""
    predictor = TensorFlowPredictor(
        "endpoint",
        sagemaker_session,
        serializer=IdentitySerializer(content_type="application/jsons"),
    )
    mock_response(json.dumps(PREDICT_RESPONSE).encode("utf-8"), sagemaker_session)

    result = predictor.predict("[1.0, 2.0, 3.0]\n[4.0, 5.0, 6.0]")

    assert_invoked(
        sagemaker_session,
        EndpointName="endpoint",
        ContentType="application/jsons",
        Accept=JSON_CONTENT_TYPE,
        Body="[1.0, 2.0, 3.0]\n[4.0, 5.0, 6.0]",
    )
    assert result == PREDICT_RESPONSE
def main():
    """CLI: send one MNIST image to a named endpoint, print latency and the
    predicted label with its confidence."""
    parser = argparse.ArgumentParser()
    parser.add_argument('endpoint_name')
    parser.add_argument('--data_dir', default='data/')
    parser.add_argument('--mnist_index', '-i', type=int, default=0)
    args = parser.parse_args()

    predictor = TensorFlowPredictor(args.endpoint_name)
    image = get_mnist_data(args.data_dir, index=args.mnist_index)

    start = time.time()
    outputs = predictor.predict({'instances': image})
    elapsed_ms = (time.time() - start) * 1000
    print(f'inference time: {elapsed_ms:.2f} ms')

    # First (only) row of the prediction batch; arg-max gives the digit.
    scores = np.array(outputs['predictions'][0])
    pred_label = np.argmax(scores)
    pred_confidence = np.max(scores)
    print(f'prediction: {pred_label} ({pred_confidence * 100:.1f}%)')
def main():
    """CLI: tear down the SageMaker model and endpoint named on the command line."""
    parser = argparse.ArgumentParser()
    parser.add_argument('endpoint_name')
    args = parser.parse_args()

    predictor = TensorFlowPredictor(args.endpoint_name)
    # Delete the backing model first, then the endpoint itself.
    predictor.delete_model()
    predictor.delete_endpoint()
def predictor_from_config_maybe(self):
    """Build a TensorFlowPredictor from the configured endpoint setting.

    Returns None when AWS_DNN_PREDICTOR_ENDPOINT is unset or empty.
    """
    endpoint = AppConfig.setting('AWS_DNN_PREDICTOR_ENDPOINT')
    if endpoint in (None, ''):
        return None
    return TensorFlowPredictor(endpoint)
"""Score each seismic test row against the saved-model endpoint and print porosity."""
import pandas as pd

from sagemaker.tensorflow import TensorFlowPredictor

endpoint_name = "endpoint-from-saved-model"
predictor = TensorFlowPredictor(endpoint_name)

x_test = pd.read_csv('seismic_poro_test.csv')

# Build per-feature mean/std from the data itself, excluding the target column.
stats = x_test.describe()
stats.pop('porosity')
stats = stats.transpose()

# Z-score normalize, then drop the (now-NaN) target column before prediction.
x_test = (x_test - stats['mean']) / stats['std']
x_test.pop('porosity')

for row in x_test.values:
    prediction = predictor.predict(row)
    print(prediction['predictions'][0][0])
"""Send one four-feature sample to the 'SageMakerEndpoint' endpoint and print the result."""
from sagemaker.tensorflow import TensorFlowPredictor

print(' ## Building predictor ')
predictor = TensorFlowPredictor('SageMakerEndpoint')

print(' ### Executing prediction ')
# A single sample; predict() receives it wrapped in a batch of size 1.
features_data = [
    34.49726772511229,
    12.65565114916675,
    39.57766801952616,
    4.0826206329529615,
]
predictions = predictor.predict([features_data])
print(predictions)
def test_predictor_classify_bad_content_type(sagemaker_session):
    """classify() only supports JSON payloads; a CSV serializer must be rejected."""
    predictor = TensorFlowPredictor("endpoint", sagemaker_session, CSVSerializer())
    with pytest.raises(ValueError):
        predictor.classify(CLASSIFY_INPUT)
def test_predictor_regress_bad_content_type(sagemaker_session):
    """regress() only supports JSON payloads; a CSV serializer must be rejected."""
    predictor = TensorFlowPredictor("endpoint", sagemaker_session, CSVSerializer())
    with pytest.raises(ValueError):
        predictor.regress(REGRESS_INPUT)
"""Classify one cat image with a deployed TF endpoint, optionally deleting the endpoint."""
import numpy as np
import sagemaker
from sagemaker.tensorflow import TensorFlowPredictor
from tensorflow.python.keras.preprocessing.image import load_img

remove_endpoint = False
endpoint_name = "[YOUR ENDPOINT]"

predictor = TensorFlowPredictor(endpoint_name)

# Load at the model's expected resolution, then add a leading batch dimension.
cat_image = load_img("./tmp/PetImages/Cat/2282.jpg", target_size=(128, 128))
cat_image_array = np.array(cat_image).reshape((1, 128, 128, 3))

print(predictor.predict({"inputs_input": cat_image_array}))

# Optional cleanup so the endpoint does not keep billing after the demo.
if remove_endpoint:
    sagemaker.Session().delete_endpoint(predictor.endpoint)
# Notebook fragment: `np_im`, `np_new_im`, `predictor`, and `predict_class`
# are defined in earlier cells — TODO confirm against the full notebook.

# Inspect and preview the loaded image.
np_im.shape
imshow(np_im)

# Add a batch axis so the tensor matches the Keras TF serving input shape.
np_new_im = np_new_im[np.newaxis, ...]
np_new_im.shape

# Inference with the predictor created earlier in the notebook.
response = predictor.predict({'inputs_input': np_new_im})
response

# Inference again, this time through an already-deployed endpoint.
import json
from sagemaker.tensorflow import TensorFlowPredictor

predictor = TensorFlowPredictor('sagemaker-tensorflow-2019-07-17-03-09-42-707')
result = predictor.predict({'inputs_input': np_new_im})

# CIFAR-10 label index -> human-readable class name.
object_categories = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                     'dog', 'frog', 'horse', 'ship', 'truck']
print(object_categories[int(predict_class)])