def main():
    """Smoke-test the text scorer and write the web-service schema.

    Calls init()/run() once on a sample sentence, then emits the swagger
    schema (service-schema.json) used for AML operationalization.
    """
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    # One-row frame that defines the service's expected input shape.
    df = pandas.DataFrame(data=[['please add your text here.']], columns=['text'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pandas.DataFrame(data=[[
        'The authors report on six cases of famotidine-associated delirium in hospitalized patients who cleared completely upon removal of famotidine.'
    ]], columns=['text'])
    print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run, inputs=inputs, filepath='service-schema.json')
    print("Schema generated")
def main():
    """Exercise init()/run() on a two-review sample and write the schema."""
    # Sample frame defining the schema and serving as the example payload.
    sample_reviews = pandas.DataFrame(
        data=[["That was a good meal"], ["What poor service"]],
        columns=["Review"])

    print("Initializing")
    init()
    print("After Init")
    print(len(model))

    scores = run(sample_reviews)
    print(scores)
    print(sample_reviews)

    schema_inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, sample_reviews)}

    # generate_schema writes the schema file (service_schema.json) into the
    # ./outputs folder. (A prepare() call that also wrote the scoring file
    # was used here previously.)
    generate_schema(run_func=run,
                    inputs=schema_inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Smoke-test the iris scorer and write ./outputs/service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    df = pandas.DataFrame(
        data=[[3.0, 3.6, 1.3, 0.25]],
        columns=['sepal length', 'sepal width', 'petal length', 'petal width'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pandas.DataFrame([[3.0, 3.6, 1.3, 0.25]])
    print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Smoke-test the drone-telemetry scorer and write the service schema."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas as pd
    import json
    import os  # fix: os.environ is used below but os was never imported here

    df = pd.DataFrame(data=[[31.6, 4.7, 20.8, 77.7]],
                      columns=['drone_batteryVoltage', 'drone_responseTime',
                               'ambient_temperature', 'ambient_humidity'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions (run() takes the raw JSON string).
    init()
    input1 = '{ "drone": { "batteryVoltage": 31.6, "responseTime": 4.7 }, \
"ambient": { "temperature": 20.8, "humidity": 77.7 },\
"timeCreated": "2017-12-27T16:02:09.1933728Z" }'
    print("Result: " + run(input1))

    inputs = {"input_str": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Run the scorer on a two-sentence batch and emit service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas

    # One-row frame that defines the service input schema.
    df = pandas.DataFrame(data=[['text-value']], columns=['text'])

    # Small two-sentence test batch.
    samples = [
        'Famotidine is a histamine H2-receptor antagonist used in inpatient settings for prevention of stress ulcers.',
        'Insulin is used to treat diabetes.',
    ]
    input1 = pandas.DataFrame.from_dict({'text': samples})

    # Test the functions' output
    init()
    print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Write the schema file (service_schema.json) to the output folder.
    generate_schema(run_func=run, inputs=inputs, filepath='service_schema.json')
    print("Schema generated")
def main():
    """WebService unit test and swagger schema generation."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    from PIL import Image
    import json, os
    import numpy  # fix: numpy.random is used below but numpy was never imported here

    # Save labels
    generate_labels("data/train", "labels.json")
    print("Labels generated")

    # Create random image
    pilImg = Image.fromarray(
        (numpy.random.rand(224, 224, 3) * 255).astype('uint8'))
    base64ImgString = pilImgToBase64(pilImg)
    df = {'base64image': base64ImgString}

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the classification
    init()
    run(df)

    inputs = {"input_df": SampleDefinition(DataTypes.STANDARD, df)}

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Smoke-test the anomaly-detection scorer and write the service schema."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    # Anomaly sample illustrating the input columns.
    df = pandas.DataFrame(data=[[33.66995566, 2.44341267, 21.39450979, 26]],
                          columns=['machine_temperature', 'machine_pressure',
                                   'ambient_temperature', 'ambient_humidity'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()

    # Anomaly
    #input1 = '{ "machine": { "temperature": 33.66995566, "pressure": 2.44341267 }, \
    #"ambient": { "temperature": 21.39450979, "humidity": 26 },\
    #"timeCreated": "2017-10-27T18:14:02.4911177Z" }'

    # Normal
    input1 = '{ "machine": { "temperature": 31.16469009, "pressure": 2.158002669 }, \
"ambient": { "temperature": 21.17794693, "humidity": 25 },\
"timeCreated": "2017-10-27T18:14:02.4911177Z" }'
    print("Result-ilyas: " + str(run(input1)))

    inputs = {"input_json": SampleDefinition(DataTypes.STANDARD, input1)}
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Test init()/run() on machine telemetry, then generate and upload the schema."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    # temp=45.9842594460449, volt=150.513223075022, rotate=277.294013981084, state=0.0, time=1.0, id=1.0
    df = pandas.DataFrame(
        data=[[45.9842594460449, 150.513223075022, 277.294013981084, 1.0, 1.0],
              [46.9842594460449, 152.513223075022, 277.294013981084, 2.0, 1.0]],
        columns=['temp', 'volt', 'rotate', 'time', 'id'])
    print(df.dtypes)

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    print("past init building inputs?")
    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}
    print("calling run?")
    res = run(df)
    print(res)

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
    # upload_schema is defined elsewhere in this project.
    upload_schema('./outputs/service_schema.json')
    print("Schema uploaded")
def main():
    """Score one randomly generated image and emit service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas

    # Build a random 5x5 RGB image and base64-encode it as the sample input.
    # (A fixed base64 string or an image file would work just as well.)
    random_pixels = (np.random.rand(5, 5, 3) * 255).astype('uint8')
    pilImg = Image.fromarray(random_pixels)
    base64ImgString = pilImgToBase64(pilImg)

    # Call init() and run() function
    init()
    df = pandas.DataFrame(data=[[base64ImgString]],
                          columns=['image base64 string'])
    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}
    resultString = run(df)
    print("resultString = " + str(resultString))

    # Generate the schema
    generate_schema(run_func=run, inputs=inputs, filepath='service_schema.json')
    print("Schema generated.")
def main():
    """Smoke-test the review-sentiment scorer and write senti_service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas as pd
    import os  # fix: os.makedirs/os.environ are used below but os was never imported here

    # create the outputs folder
    os.makedirs('./outputs', exist_ok=True)

    df1 = pd.DataFrame(data=[[
        "I absolutely love my bank. There's a reason this bank's customer base is so strong--their customer service actually acts like people and not robots. I love that anytime my card is swiped, I'm instantly notified. And the built in budgeting app is something that really makes life easier. The biggest setback is not being able to deposit cash (you have to get a money order), and if you have another, non-simple bank account, transferring money between accounts can take a few days, which frankly isn't acceptable with most ACH taking a business day or less. Overall, it's a great bank, and I would recommend it to anyone."
    ]], columns=['review'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pd.DataFrame(data=[[
        "I absolutely love my bank. There's a reason this bank's customer base is so strong--their customer service actually acts like people and not robots. I love that anytime my card is swiped, I'm instantly notified. And the built in budgeting app is something that really makes life easier. The biggest setback is not being able to deposit cash (you have to get a money order), and if you have another, non-simple bank account, transferring money between accounts can take a few days, which frankly isn't acceptable with most ACH taking a business day or less. Overall, it's a great bank, and I would recommend it to anyone."
    ]], columns=['review'])
    print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df1)}

    # Generate the schema
    #generate_schema(run_func=run, inputs=inputs, filepath='./outputs/senti_schema2.json')
    generate_schema(run_func=run, inputs=inputs,
                    filepath='senti_service_schema.json')
    print("Schema generated")
def main():
    """Smoke-test the movie-review scorer and write myschema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.makedirs/os.environ are used below but os was never imported here

    # create the outputs folder
    os.makedirs('./outputs', exist_ok=True)

    df = pandas.DataFrame(
        data=[['i loved the new movie and enjoyed the great acting']],
        columns=['reviewText'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pandas.DataFrame(
        data=[['i loved the new movie and enjoyed the great acting']],
        columns=['reviewText'])
    print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run, inputs=inputs, filepath='myschema.json')
    print("Schema generated")
def main(protocol):
    """Run the IoT Hub message loop, then exercise the scorer and write the schema.

    The schema-generation code only runs after the message loop ends via
    Ctrl-C (the IoTHubError branch returns early).
    """
    try:
        print("\nPython %s\n" % sys.version)
        print("IoT Hub Client for Python")

        hub_manager = HubManager(protocol)

        print("Starting the IoT Hub Python sample using protocol %s..." %
              hub_manager.client_protocol)
        print(
            "The sample is now waiting for messages and will indefinitely. Press Ctrl-C to exit. "
        )
        while True:
            time.sleep(1)

    except IoTHubError as iothub_error:
        print("Unexpected error %s from IoTHub" % iothub_error)
        return
    except KeyboardInterrupt:
        print("IoTHubModuleClient sample stopped")

    # Anomaly sample frame describing the expected input columns.
    df = pandas.DataFrame(data=[[33.66995566, 2.44341267, 21.39450979, 26]],
                          columns=[
                              'machine_temperature', 'machine_pressure',
                              'ambient_temperature', 'ambient_humidity'
                          ])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'false'

    # Test the output of the functions
    init()

    # Anomaly
    input1 = '{ "machine": { "temperature": 33.66995566, "pressure": 2.44341267 }, \
"ambient": { "temperature": 21.39450979, "humidity": 26 },\
"timeCreated": "2017-10-27T18:14:02.4911177Z" }'
    # Normal
    # input1 = '{ "machine": { "temperature": 31.16469009, "pressure": 2.158002669 }, \
    #"ambient": { "temperature": 21.17794693, "humidity": 25 },\
    #"timeCreated": "2017-10-27T18:14:02.4911177Z" }'
    #print("Result: " + run(input1))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}
    # FIX: generate_schema expects the SampleDefinition dict built above;
    # previously the raw JSON string input1 was passed as `inputs`.
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
def main():
    # Load the Azure ML libraries to generate a schema
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema

    # Test the init and run functions using test data.
    # test_doc_text is a base64-encoded JPEG image used as the sample payload
    # (presumably what run() expects as "doc_base64" — confirm against the
    # scoring implementation).
    test_doc_text = "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCACAAIADASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwD36iiigAooooA5vVdW8Rl5ItE0FZNrFfPvJljU+6qDkj34rkNab4nW9q8haJweANOUOw/AjP8AOvU6KAPEvDGo+NdZ1i5sLvULiKQORGJZCm3HXIHPTHHWtnUtS8Q6BfyWt1qjSsqCRXicsGU54IbODx/KpPEt0X8cTXWly+VJZQqtxMBkGQZwPchSAfy7Vc0vwk/iEyajq13cgySZ/dttMmOxPYcYwMdKYGL4d+LU8gM2rxhbJbhYGlYYZd2fm4AzjHIx0+leuo6yIHRgysMgg5BFeRePfhYX0/7doUtxI9upZ7SWUvuUDkpnnd1479BVb4V/EAWyx+HNZlKKp2Wk0nG3/pm2ent+XpQB7PRRRSAKKKKACiiigAooooAKKKKACsXxZr8Xhrw1eanIRujXbEp/ikbhR+Z/StqvIPixrC3HiPS9EBDQWaG/uF7Fvuxqf1P40IDM0K2uNQ12zsBLIXVPtFyCfvSyHjf6nktjtXt1vAltbxwxjCRqFUewrzP4UWkl0+oaxcKS0knyse5I/oMfnXqFNgFcJ42+Glh4nL31oVtNVPJl52y4GAHH9QM/Wu7opAeR+H/G2seDLqPQvGltMtuvywXxG7A7ZI++vv8AeHcV6vbXUF7bR3FtNHNDINySRsGVh7EVFf6dZ6paPa31tFcQOMMki5H/ANY+9clp3gu78K65Dc+Hr2U6XNLi706d8qqn+NCe4647jvQB29FFFABRRRQAUUUUAFFFFABXzPrWoHXPF3iG/DZWa8FtEfRE+UfzzX0TrV+ml6HfX8jbVt4Hkzj0Bx+tfMPhVWuIbRSMvPfNuJ9eaaA+j/B2mrpfhayhCgM6ea31bn+WB+Fb1NjRYo1jUYVQFH0FOpAFFFFABRRRQAUUUUAFFFFABRRRQAUUUUAZHisE+ENaA6/YZv8A0A185eAYwRozn/oJun5BT/WvprU7f7XpV5bf89oHj/NSK+YfB2osLHSYXzm31hsjPTdGP6qaaA+qKKKKQBRRRQAUUUUAFFFFABRRRQAUUUUAFFFFABXyjNbto3ifxBpwCgWerrMOTnaXKjAx0w4/Ovq6vnT4oac1h8SNYdFhRdS01Jw8suwBlIBx6nKChAfQdhN9o062mznzIlb8wKsVzfg7VYLjwRY30kypEkPzu5ACgdye3FdGrB1DKQVIyCO9AC0UUUAFFFFABRRRQAUUUUAFFFFABRRRQAV4j+0NpO+z0XWFTmKR7dz7MAy/qp/Ovbq4b4vacNR+G+pcZa22XKj/AHWGf0JoA8h+H/h/XvG6Q2MmoPFoFq6vNF5uA2OMBB1J29TwK+llUKoUDAAwBXgvwKv1t9cvNPYjE0RZOOc9ev4Gve6bAKKKKQBRRRQAUUUUAFFFFABRRRQAUUUUAFUdasRqeh39gQCLm3ki/wC+lIq9RQB8m+BtVfQfGGm3chKhJBHL9M4b+tfWIIIyDkV8q+ONKOi+OdWtVXai3Jmj/wByT5x/MivoT4f66uv+D7K4LgzRL5Mv+8vf8Rg1T7gdPRXP+ENWuNV0y7F2/mXNpfXFrI+AM7HO3p/skV0FSAUUUUAFFFFABRRRQAUUUUAFFFFABRRRQB4z8bvDkrz6frtrEXLD7HOFHOScxn88j8q4rRdY8W+Cnm0+1hntZ503eTNb5Lc4BUN1OTjjPWvpDUrCPU9Oms5eFkXAP90jkH8CAfwrP0vTZ57v+1dWjiN8E8qJQgxCoJyVJ7t1J+lO/QDP+Hvhy78N+GBDqMxl1C6ma6umJziR+oz39/fNdXRRSAKKKKACiiigAooooA//2Q=="
    init()
    category = run(test_doc_text)
    print(category)

    # Generate the schema file (schema.json)
    inputs = {"doc_base64": SampleDefinition(DataTypes.STANDARD, test_doc_text)}
    generate_schema(run_func=run, inputs=inputs, filepath='./outputs/schema.json')
def main():
    """Classify one sample document and write schema.json for the service."""
    # Load the Azure ML libraries to generate a schema
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema

    # Exercise init()/run() with a tiny text sample.
    test_doc_text = "OpenGL on the GPU is fast"
    init()
    category = run(test_doc_text)
    print(category)

    # Emit the swagger schema (schema.json) describing the service input.
    sample = SampleDefinition(DataTypes.STANDARD, test_doc_text)
    generate_schema(run_func=run,
                    inputs={"doc_text": sample},
                    filepath='schema.json')
def generate_api_schema():
    """Build outputs/schema.json from the sample.csv example frame."""
    import os

    print("create schema")
    sample_frame = pd.read_csv("sample.csv")
    schema_inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, sample_frame)}
    os.makedirs('outputs', exist_ok=True)
    result = generate_schema(inputs=schema_inputs,
                             filepath="outputs/schema.json",
                             run_func=run)
    print(result)
def generate_api_schema():
    """Write outputs/schema.json using a placeholder byte-string sample."""
    import os

    print("create schema")
    sample_input = "byestring_representing_image"
    schema_inputs = {"input_bytes": SampleDefinition(DataTypes.STANDARD, sample_input)}
    os.makedirs('outputs', exist_ok=True)
    schema_path = os.path.join("outputs", "schema.json")
    print(generate_schema(inputs=schema_inputs, filepath=schema_path, run_func=run))
def generate_api_schema():
    """Write outputs/schema.json using a sample text payload."""
    import os

    print("create schema")
    sample_input = "sample data text"
    schema_inputs = {"text": SampleDefinition(DataTypes.STANDARD, sample_input)}
    os.makedirs('outputs', exist_ok=True)
    result = generate_schema(inputs=schema_inputs,
                             filepath="outputs/schema.json",
                             run_func=run)
    print(result)
def createwebserviceschema():
    """Generate ./outputs/schema.json from the module-level X_test numpy sample."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema

    sample = SampleDefinition(DataTypes.NUMPY, X_test)
    result = generate_schema(run_func=run,
                             inputs={"inputData": sample},
                             filepath="./outputs/schema.json")
    print(result)
def main():
    """Initialize the tweet classifier and write service-schema.json."""
    # NOTE(review): these imports look unused by name, but loading the model
    # inside init() may require them to be importable — keep them.
    from tatk.pipelines.text_classification.text_classifier import TextClassifier
    from sklearn.linear_model import LogisticRegression
    import pandas

    init()

    # One-row frame that defines the expected service input.
    df = pandas.DataFrame(data=[['please add your good text here.']],
                          columns=['tweets'])

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run, inputs=inputs, filepath='service-schema.json')
    print("Schema generated")
def main():
    """Score one random 28x28 sample and write outputs/schema.json."""
    # Random single-channel 28x28 image, reshaped to the (1, 28, 28, 1)
    # layout used during training.
    sample_input = (np.random.rand(28, 28, 1) * 255).reshape(1, 28, 28, 1)

    # Calling init() and run()
    init()
    inputs = {"input_array": SampleDefinition(DataTypes.NUMPY, sample_input)}
    result_string = run(sample_input)
    print("resultString = " + str(result_string))

    # Generating the schema
    generate_schema(run_func=run, inputs=inputs, filepath='outputs/schema.json')
    print('Schema generated')
def main():
    """Smoke-test the classifier and publish schema.json to blob storage."""
    # Exercise init()/run(), including a non-ASCII (emoji) input.
    init()
    print(run("🍪"))

    test_doc_text = "tomato pizza"

    # Generate the schema file (schema.json) needed for AML operationalization
    inputs = {"doc_text": SampleDefinition(DataTypes.STANDARD, test_doc_text)}
    generate_schema(run_func=run, inputs=inputs, filepath='./outputs/schema.json')

    # Upload the generated schema to the 'embeddings' blob container.
    block_blob_service.create_blob_from_path('embeddings', 'schema.json',
                                             './outputs/schema.json')
def main():
    """Check the cell-profiling scorer and write output/service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas

    sample_row = [[380, 120, 76]]
    df = pandas.DataFrame(data=sample_row,
                          columns=['indicator1', 'NF1', 'cellprofiling'])

    # Check the output of the function
    init()
    print("Result: " + run(pandas.DataFrame(sample_row)))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the service_schema.json
    generate_schema(run_func=run, inputs=inputs,
                    filepath='output/service_schema.json')
    print("Schema generated")
def main():
    """Check the body-measurement scorer and write service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas

    sample_row = [[190, 60, 38]]
    df = pandas.DataFrame(data=sample_row,
                          columns=['height', 'width', 'shoe_size'])

    # Test the functions' output
    init()
    print("Result: " + run(pandas.DataFrame(sample_row)))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the service_schema.json
    generate_schema(run_func=run, inputs=inputs, filepath='service_schema.json')
    print("Schema generated")
def main():
    """Encode two sample sentences and write infersent_service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema

    sample_sentences = ['This is a sentence.', 'This is another one.']

    # Exercise init()/run() once on the sample input.
    init()
    run(sample_sentences)

    # Describe the service input and emit the schema file.
    inputs = {"sentences": SampleDefinition(DataTypes.STANDARD, sample_sentences)}
    generate_schema(run_func=run, inputs=inputs,
                    filepath='infersent_service_schema.json')
    print("Schema generated")
def main():
    """Template: generate service_schema.json from a sample DataFrame.

    NOTE: `yourinputdataframe` is a placeholder that must be bound to a
    pandas DataFrame sample before this template will run.
    """
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    inputs = {
        "input_df": SampleDefinition(DataTypes.PANDAS, yourinputdataframe)
    }
    generate_schema(run_func=run, inputs=inputs, filepath='service_schema.json')
    print("Schema generated")


if __name__ == "__main__":
    main()
def main():
    """Run the CNN text scorer on one sample and write the schema file."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import numpy as np

    text_entry = 'This is a sentence. This is an awesome sentence!'

    # Test the output of the functions
    init()
    print("Result: " + run(text_entry))

    # Emit the swagger schema for the service input.
    sample = SampleDefinition(DataTypes.STANDARD, text_entry)
    generate_schema(run_func=run,
                    inputs={"text_entry": sample},
                    filepath='./outputs/cnn_service_schema.json')
    print("Schema generated")
def main():
    """Test the review-sentiment scorer and write the service schema."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    df = pandas.DataFrame(
        data=["What a waste of time and money! The story was not realistic at all! Actually it was completely far fetched!"],
        columns=['Text'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pandas.DataFrame(
        data=["What a waste of time and money! The story was not realistic at all! Actually it was completely far fetched!"],
        columns=['Text'])
    print("The input {0} created the following output: {1}".format(
        input1['Text'], run(input1)))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def main():
    """Test the motion-sensor scorer and write the service schema."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas
    import os  # fix: os.environ is used below but os was never imported here

    df = pandas.DataFrame(
        data=[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],
        columns=['MAX_ACCX', 'MAX_ACCY', 'MAX_ACCZ', 'MAX_GYROX', 'MAX_GYROY',
                 'MAX_GYROZ', 'ACC_AVG', 'ACC_VAR', 'GYRO_AVG', 'GYRO_VAR'])

    # Turn on data collection debug mode to view output in stdout
    os.environ["AML_MODEL_DC_DEBUG"] = 'true'

    # Test the output of the functions
    init()
    input1 = pandas.DataFrame([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]])
    print("The input {0} created the following output: {1}".format(
        input1.values, run(input1)))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run,
                    inputs=inputs,
                    filepath='./outputs/service_schema.json')
    print("Schema generated")
def generate_api_schema():
    """Write schema.json describing both the input sample and the output shape."""
    sample_in = {"input_array": SampleDefinition(DataTypes.STANDARD, "dGVzdA==")}
    sample_out = {"coordinates": SampleDefinition(DataTypes.STANDARD,
                                                  [1, 2, 4, 5, 8, 9, 5, 6])}
    print(generate_schema(inputs=sample_in,
                          filepath="schema.json",
                          run_func=run,
                          outputs=sample_out))


# Manual smoke-test kept for reference:
# generate_api_schema()
# import base64
# init()
# with open("E:\\Repos\\esk-form-scanner-model\\src\\data\\validation\\i0.jpg", "rb") as file:
#     print(base64.b64encode(file.read(-1)))
def main():
    """Score one iris sample and write service_schema.json."""
    from azureml.api.schema.dataTypes import DataTypes
    from azureml.api.schema.sampleDefinition import SampleDefinition
    from azureml.api.realtime.services import generate_schema
    import pandas

    feature_names = ['sepal length', 'sepal width', 'petal length', 'petal width']
    sample = [[3.0, 3.6, 1.3, 0.25]]
    df = pandas.DataFrame(data=sample, columns=feature_names)

    # Test the output of the functions
    init()
    print("Result: " + run(pandas.DataFrame(sample)))

    inputs = {"input_df": SampleDefinition(DataTypes.PANDAS, df)}

    # Generate the schema
    generate_schema(run_func=run, inputs=inputs, filepath='service_schema.json')
    print("Schema generated")
#========================= CREATE WEB SERVICE SCHEMA ========================= def run(inputData): import json prediction = classifier.predict(input_df) prediction = "%s %d" % (str(input_df), classifier) return json.dumps(str(prediction)) from azureml.api.schema.dataTypes import DataTypes from azureml.api.schema.sampleDefinition import SampleDefinition from azureml.api.realtime.services import generate_schema inputs = {"inputData": SampleDefinition(DataTypes.NUMPY, X_test)} print(generate_schema(run_func=run, inputs=inputs, filepath="./outputs/schema.json")) ''' #========================= VISUALISING THE RESULTS ========================= import numpy as np import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap # Visualising the Training set results X_set, y_set = X_train, y_train X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01), np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01)) plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), alpha = 0.75, cmap = ListedColormap(('red', 'green'))) plt.xlim(X1.min(), X1.max()) plt.ylim(X2.min(), X2.max())