def main(_):
    """CLI entry point: parse arguments and dispatch on the selected mode.

    Recognized modes are 'train', 'test' (prediction) and 'chat'; any other
    mode is silently ignored, matching the original if/elif chain.
    """
    args = params_setup()
    print("[args]: ", args)
    handlers = {'train': train, 'test': predict, 'chat': chat}
    handler = handlers.get(args.mode)
    if handler is not None:
        handler(args)
import rospy from roboy_communication_cognition.srv import DetectIntent rospack = rospkg.RosPack() sys.path.insert(0, rospack.get_path('roboy_intents_classification')) from include import skipthoughts neighbors = 1 stemmer = LancasterStemmer() words = [] classes = [] documents = [] sentences = [] ignore_words = ['?', ',', 'roboy', 'Roboy', '\n', '.'] args = params_setup() def read_intents(): import os intents_path = args.intents_path#os.getcwd() + "/intents/"; training_data = [] for filename in os.listdir(intents_path): with open(intents_path + filename) as f: for line in f: training_data.append({"class": filename, "sentence": line}) return training_data def sanitize_sentence(sentence):
    # --- continuation of an enclosing interactive chat loop; its `def` begins
    # before this chunk, so the code below is left logically untouched.
    # Read-eval-print loop: prompt, read a line from stdin, decode it with the
    # seq2seq model, print the prediction(s), repeat until EOF (empty read).
    sys.stdout.write("> ")
    sys.stdout.flush()
    sentence = sys.stdin.readline()
    while sentence:
        # vocab, rev_vocab, model, sess come from the enclosing scope --
        # presumably set up earlier in the enclosing function; confirm.
        predicted_sentence = get_predicted_sentence(
            args, sentence, vocab, rev_vocab, model, sess)
        if isinstance(predicted_sentence, list):
            # Beam-search style result: one (probability, decoded text) per entry.
            for sent in predicted_sentence:
                print(" (%s) -> %s" % (sent['prob'], sent['dec_inp']))
        else:
            print(sentence, ' -> ', predicted_sentence)
        sys.stdout.write("> ")
        sys.stdout.flush()
        sentence = sys.stdin.readline()


def main():
    """Evaluate the seq2seq model using the module-level FLAGS configuration."""
    evaluate(FLAGS)


if __name__ == '__main__':
    # Parse command-line parameters once, then run evaluation.
    FLAGS = params_setup()
    main()
async def service_callback():
    """Serve /roboy/cognition/generative_nlp/answer over a rosbridge websocket.

    Connects to the local rosbridge server, advertises the GenerateAnswer
    service, loads the seq2seq model once, then loops forever: receive a
    service request, decode its ``text_input`` with the model, and send a
    ``service_response`` message back. Exceptions are logged and the loop
    keeps serving.
    """
    async with websockets.connect('ws://localhost:9090') as websocket:
        # Advertise the service (rosbridge v2 protocol). json.dumps replaces
        # the previous hand-escaped string literal, whose exact payload bytes
        # depended on backslash line continuations inside the string.
        await websocket.send(json.dumps({
            "op": "advertise_service",
            "type": "roboy_communication_cognition/GenerateAnswer",
            "service": "/roboy/cognition/generative_nlp/answer",
        }))

        request_id = 1  # counter for the service request IDs

        with tf.Session() as sess:
            # Create model and load parameters.
            logging.info("Loading the model")
            args = params_setup()
            args.batch_size = 1  # We decode one sentence at a time.
            model = create_model(sess, args)

            # Load vocabularies.
            vocab_path = os.path.join(args.data_dir,
                                      "vocab%d.in" % args.vocab_size)
            vocab, rev_vocab = data_utils.initialize_vocabulary(vocab_path)

            logging.info(
                "Service /roboy/cognition/generative_nlp/answer is ready")

            # Wait for the service request, generate the answer, send it back.
            while True:
                try:
                    request = await websocket.recv()
                    sentence = json.loads(request)["args"]["text_input"]
                    model_response = get_predicted_sentence(
                        args, sentence, vocab, rev_vocab, model, sess)

                    # Beam-search output is a list; take the best hypothesis.
                    if isinstance(model_response, list):
                        text = model_response[0]['dec_inp']
                    else:
                        text = model_response['dec_inp']

                    # Strip non-ASCII characters. Equivalent to the previous
                    # per-character filter, but without reusing the name ``i``
                    # in a comprehension that shadowed the request counter.
                    answer = {
                        "text_output":
                            text.encode('ascii', 'ignore').decode('ascii')
                    }
                    srv_response = {
                        "op": "service_response",
                        "id": "service_request:/roboy/cognition/generative_nlp/answer:"
                              + str(request_id),
                        "service": "/roboy/cognition/generative_nlp/answer",
                        "values": answer,
                        "result": True,
                    }
                    request_id += 1
                    await websocket.send(json.dumps(srv_response))
                except Exception:
                    # Top-level service boundary: log with traceback and keep
                    # serving subsequent requests.
                    logging.exception(
                        "Oopsie! Got an exception in generative_nlp")