def predict_host(host_csv="./hostHW/host_hwrecord.csv", predict_type="both",
                 cpu_weight=0.5, memory_weight=0.5):
    predict(previous_csv=host_csv, predict_type=predict_type,
            cpu_weight=cpu_weight, memory_weight=memory_weight)
def predict_container(predict_type="both", cpu_weight=0.5, memory_weight=0.5):
    # The two weights must sum to 1; compare with a tolerance to avoid
    # spurious failures from floating-point rounding (e.g. 0.3 + 0.7).
    if abs(cpu_weight + memory_weight - 1) > 1e-9:
        raise RuntimeError("Weight Error: cpu_weight and memory_weight must sum to 1")
    # Get all container ids and run the predictor on each container's record
    result_conid = get_id()
    for con_id in result_conid:
        con_id = con_id[:-1]  # strip the trailing newline
        container_csv = "./containerHW/" + con_id + "_hwrecord.csv"
        predict(previous_csv=container_csv, predict_type=predict_type,
                cpu_weight=cpu_weight, memory_weight=memory_weight)
    return
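# A minimal sketch of the get_id() helper assumed above: it should yield one
# container id per entry, each still carrying the trailing newline that
# predict_container strips with con_id[:-1]. Sourcing the ids from
# `docker ps -q` is an assumption, not the project's actual implementation.
import subprocess

def get_id():
    # List the ids of all running containers, one per line.
    output = subprocess.run(["docker", "ps", "-q"],
                            capture_output=True, text=True, check=True).stdout
    # keepends=True preserves each trailing '\n' for the caller to strip.
    return output.splitlines(keepends=True)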
def predictor():
    p = predict()
    if request.method == 'POST':
        message = request.form['mail']
        data = [message]
        result = p.prediction(data)
        # result = str(result)
        # print(result)
        # print(type(result))
        return render_template('sample.html',
                               tables=[result.to_html(classes='data')],
                               titles=result.columns.values)
        # return result
    return render_template('predict.html')
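# A minimal sketch of how the predictor view above could be registered in a
# Flask app; the '/predict' route and the app layout are assumptions.
from flask import Flask

app = Flask(__name__)
# Serve the form on GET and run a prediction on POST.
app.add_url_rule('/predict', view_func=predictor, methods=['GET', 'POST'])

if __name__ == '__main__':
    app.run(debug=True)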
def predict_attend(request):
    """HTTP Cloud Function.
    Args:
        request (flask.Request): The request object.
        <http://flask.pocoo.org/docs/1.0/api/#flask.Request>
    Returns:
        The response text, or any set of values that can be turned into a
        Response object using `make_response`
        <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>.
    """
    request_json = request.get_json(silent=True)
    result = pm.predict(request_json)
    print('you will be absent at {}!'.format(result))
    return (json.dumps(result), 200, {})
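# A hedged example of exercising predict_attend locally with Flask's test
# request context; the JSON payload keys are assumptions about what the
# project's pm.predict expects, not its documented schema.
from flask import Flask, request

_app = Flask(__name__)
with _app.test_request_context(json={"employee_id": 42, "date": "2020-01-01"}):
    body, status, headers = predict_attend(request)
    print(status, body)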
def main():
    # Parse input arguments
    args = parse_inputs()
    # We get the image as a FloatTensor
    img = process_image(args.image)
    # We now load the checkpoint and build the model
    model = loadcheckpoint(args)
    # We perform the computation on the selected device
    device = "cuda:0" if torch.cuda.is_available() and args.gpu else "cpu"
    print(f'These computations are performed on {device}')
    model.to(device)
    # We set the model to evaluation mode (so that we are not using dropout)
    model.eval()
    with torch.no_grad():
        probs, classes = predict(img, model, args.top_k, device)
    # Now, if a category_names file was indicated, map class ids to names
    if args.category_names:
        try:
            with open(args.category_names, 'r') as f:
                cat_to_name = json.load(f)
        except FileNotFoundError:
            print("The category names file has not been found.")
            print("Please provide a valid file.")
            sys.exit("Program terminating.")
        classes = [cat_to_name[item] for item in classes]
    # Printing out the results
    print(f'The {args.top_k} most likely classes of flowers are:')
    for key, value in zip(classes, probs):
        print(f'Flower: {key}; '
              f'Probability: {value}')
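# A possible shape for the parse_inputs() helper used by main(); only the
# attributes referenced above (image, top_k, gpu, category_names) are known,
# so the checkpoint argument and exact flag spellings are assumptions.
import argparse

def parse_inputs():
    parser = argparse.ArgumentParser(description="Predict the flower class of an image.")
    parser.add_argument("image", help="path to the input image")
    parser.add_argument("checkpoint", help="path to the saved model checkpoint")
    parser.add_argument("--top_k", type=int, default=5, help="number of classes to report")
    parser.add_argument("--category_names", help="JSON file mapping class ids to names")
    parser.add_argument("--gpu", action="store_true", help="use the GPU if available")
    return parser.parse_args()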
# Label mapping
with open(category_names, 'r') as f:
    cat_to_name = json.load(f)

# Load the checkpoint (avoid rebinding the `checkpoint` directory path)
filepath = checkpoint + '/checkpoint.pth'
checkpoint_dict = torch.load(filepath, map_location='cpu')
model = checkpoint_dict["model"]
model.load_state_dict(checkpoint_dict['state_dict'])

# Image preprocessing
np_image = process_image(image_path)
# imshow(np_image)

# Predict classes and probabilities
print(f"Predicting top {top_k} most likely flower names from image {image_path}.")
probs, classes = predict(np_image, model, top_k, gpu)
classes_name = [cat_to_name[class_i] for class_i in classes]
# print("Flower names: ", classes_name)
# print("Probabilities: ", [round(prob, 3) for prob in probs])
print("\nFlower name (probability): ")
print("---")
for i in range(len(probs)):
    print(f"{classes_name[i]} ({round(probs[i], 3)})")
print("")
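# For reference, a checkpoint with the two keys consumed above ('model' and
# 'state_dict') could have been written like this; the helper name and save
# location are assumptions.
import torch

def save_checkpoint(model, save_dir):
    torch.save({'model': model, 'state_dict': model.state_dict()},
               save_dir + '/checkpoint.pth')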
image_path = result.image_path
saved_model = result.saved_model
top_k = result.top_k
category_names = result.category_names
if top_k is None:
    top_k = 5

# Load model
reloaded_keras_model = tf.keras.models.load_model(
    saved_model, custom_objects={'KerasLayer': hub.KerasLayer})

# Predict image
image = np.asarray(Image.open(image_path))
probs, classes = predict(image_path, reloaded_keras_model, top_k)

if category_names is not None:
    with open(category_names, 'r') as f:
        class_names = json.load(f)
    names = [str(x + 1) for x in classes]
    classes = [class_names.get(name) for name in names]

# Print results
print('\n********************************************************************************************')
print('\nThe top {} classes:'.format(top_k))
for i in range(top_k):
    print('\n\u2022 Class: {}'.format(classes[i]),
          '\n\u2022 Probability: {:.3%}'.format(probs[i]))
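# A hedged sketch of the predict() helper assumed above for the Keras model:
# it loads and resizes the image (224x224 is an assumption about the model's
# input size), scales pixels to [0, 1], and returns the top_k probabilities
# and class indices.
import numpy as np
import tensorflow as tf
from PIL import Image

def predict(image_path, model, top_k):
    image = np.asarray(Image.open(image_path)).astype(np.float32)
    image = tf.image.resize(image, (224, 224)) / 255.0
    image = tf.expand_dims(image, axis=0)           # add a batch dimension
    probs = model.predict(image)[0]
    top_indices = np.argsort(probs)[-top_k:][::-1]  # highest probabilities first
    return probs[top_indices].tolist(), top_indices.tolist()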
def predict_tweet(json_tweet):
    text = json_tweet["text"]
    prediction = predict([text])
    return prediction
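# Example call with a minimal tweet-like payload; the shared predict() helper
# is defined elsewhere in the project, so its return format is not assumed here.
sample_tweet = {"text": "Just deployed the new model to production!"}
print(predict_tweet(sample_tweet))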