Example #1
0

@app.route("/")
def home():
    """Render and return the landing page template."""
    page = render_template("index.html")
    return page


@app.route("/predict")
def predict():
    """Score the sentiment of a sentence.

    Reads the text from the ``sentence`` query parameter and returns a JSON
    payload with stringified positive/negative probabilities, the echoed
    sentence, and the wall-clock time taken by the model call.
    """
    sentence = request.args.get("sentence")
    if not sentence:
        # Guard: a missing/empty query parameter would otherwise be passed as
        # None into sentence_prediction and crash the request with a 500.
        return flask.jsonify({"error": "missing 'sentence' query parameter"}), 400
    start_time = time.time()
    positive_prediction = sentence_prediction(sentence)
    # Model emits P(positive); the two classes are complementary.
    negative_prediction = 1 - positive_prediction
    response = {
        "response": {
            "positive": str(positive_prediction),
            "negative": str(negative_prediction),
            "sentence": str(sentence),
            "time_taken": str(time.time() - start_time),
        }
    }
    return flask.jsonify(response)


if __name__ == "__main__":
    # One-time model setup before serving; sentence_prediction presumably
    # reads the module-global MODEL — TODO confirm against its definition.
    MODEL = BERTBaseUncased()
    # MODEL = nn.DataParallel(MODEL)
    # Load fine-tuned weights from the path configured in config.MODEL_PATH.
    MODEL.load_state_dict(torch.load(config.MODEL_PATH))
    MODEL.to(DEVICE)
    MODEL.eval()  # inference mode: disables dropout / batch-norm updates
    app.run()
Example #2
0

if __name__ == '__main__':
	device='cuda'
	review = ['this is an amzaing place']

	# Tokenise the sample sentence with the same dataset class used in
	# training; the target value is a dummy (unused at export time).
	dataset = BERTDataset(
			review = review,target=[0]
		)
	model = BERTBaseUncased()

	# NOTE: if the checkpoint was saved from an nn.DataParallel-wrapped model,
	# its state-dict keys are prefixed with 'module.' and you must export
	# 'model.module' instead (DataParallel stores the real model under the
	# 'module' attribute).
	#model = nn.DataParallel(model)
	model.load_state_dict(torch.load(config.MODEL_PATH))
	model.eval()
	#print(model)

	# Add a leading batch dimension so each tensor is (1, seq_len).
	ids = dataset[0]['ids'].unsqueeze(0)
	mask = dataset[0]['mask'].unsqueeze(0)
	# BUG FIX: original read dataset[0][''token_type_ids] — the stray quote
	# made this a SyntaxError; the key is 'token_type_ids'.
	token_type_ids = dataset[0]['token_type_ids'].unsqueeze(0)

	torch.onnx.export(
			model,
			#model.module  [===>> if dataparallel-model ==>> see above commented line 20] 

			#we have 3 inputs so  name them here -- ordering is important 
			(ids,mask,token_type_ids),

			#export it to model.onnx
			'model.onnx',