Example #1
import falcon
from string import punctuation

# Handler method of a Falcon resource class; `name` comes from the URI template.
def on_get(self, req, resp, name=None):
    if name is None:
        raise falcon.HTTPBadRequest('Param :name is required')
    try:
        int(name)
    except ValueError:
        # Non-numeric name: reject it if it contains any punctuation character.
        count = sum(punctuation.count(ch) for ch in name)
        if count > 0:
            raise falcon.HTTPBadRequest('Param :name must be a valid string')
        resp.body = f'Hola {name}'
    else:
        # int() succeeded, so the name is purely numeric and therefore invalid.
        raise falcon.HTTPBadRequest('Param :name must be a valid string')
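For orientation, a minimal sketch of how such a handler is typically mounted. The class name HelloResource, the route, and the falcon.API() entry point are assumptions added here, not part of the original snippet (the use of resp.body suggests Falcon 1.x/2.x):

import falcon

class HelloResource:
    # The on_get handler from Example #1 goes here.
    ...

app = falcon.API()
# The {name} URI field is passed to on_get as the `name` keyword argument.
app.add_route('/hello/{name}', HelloResource())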
Example #2
import json

import falcon
from string import punctuation

# Handler method of a Falcon resource class.
def on_post(self, req, resp, userid=None):
    # Equivalent to: body = json.loads(req.stream.read())
    body = req.media
    for k, v in body.items():
        if v == '':
            raise falcon.HTTPBadRequest('all values cannot be empty')
    for k, v in body.items():
        try:
            int(v)
        except ValueError:
            # Non-numeric value: reject it if it contains any punctuation character.
            count = sum(punctuation.count(ch) for ch in v)
            if count > 0:
                raise falcon.HTTPBadRequest('all values must be valid')
        else:
            # int() succeeded, so the value is purely numeric and therefore invalid.
            raise falcon.HTTPBadRequest('all values must be valid')
    name = body['fname']
    lastname = body['lname']
    content = {'message': f'Hello {name} {lastname}'}
    resp.body = json.dumps(content)
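A usage sketch with Falcon's bundled test client (Falcon 2.0+, where simulate_post accepts a json= keyword); the /users route and the app object it targets are assumptions:

from falcon import testing

client = testing.TestClient(app)  # `app` is the falcon.API() the resource is mounted on
result = client.simulate_post('/users', json={'fname': 'Ada', 'lname': 'Lovelace'})
print(result.status, result.text)  # 200 OK plus the greeting JSON for valid input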
Example #3

from string import punctuation

def de_punc(self):
    # Return self.text with every ASCII punctuation character removed.
    return ''.join(ch for ch in self.text if ch not in punctuation)
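The same stripping can be done with str.translate, which avoids scanning the punctuation string once per character; a standalone sketch:

from string import punctuation

PUNC_TABLE = str.maketrans('', '', punctuation)

def de_punc(text):
    # Delete every ASCII punctuation character in a single pass.
    return text.translate(PUNC_TABLE)

print(de_punc('Hello, world!'))  # -> Hello world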
Example #4
import sys

import tensorflow as tf
from string import punctuation

# Reader, DynamicLSTM, words_2_map, get_header_prob and config are
# project-local helpers that the original snippet assumes are in scope.

def ajust_eval(filepath, loop_idx=-1):
	#Init reader and load model
	reader = Reader()
	header_map = words_2_map(reader)
	if len(header_map) == 0:
		return
	model = DynamicLSTM(None, is_training=False, reuse=False)
	model_variables = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope="model")
	model_saver = tf.train.Saver(model_variables)
	init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
	with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess:
		sess.run(init_op)
		#Model load
		ckpt_path = tf.train.latest_checkpoint(config.model_dir)
		if loop_idx > 0:
			ckpt_path = ckpt_path.split('-')[0] + '-' + str(loop_idx)
		if ckpt_path:
			model_saver.restore(sess, ckpt_path)
			print("Read model parameters from %s" % ckpt_path)
		else:
			print("model doesn't exist")
			return  # nothing to evaluate without a trained model

		#File handle
		f = open(filepath)
		click_total = {"total": 0, 1: 0, 3: 0, 5: 0}
		hit_total = {"total": 0, 1: 0, 3: 0, 5: 0}
		UPPERS = ['NUM', 'TELEPHONE', 'DIGIT', 'SCORES', 'USER', 'YEARGAP']

		#File loop; each line must have the format: context \t [input \t] word
		for line in f:
			line = line.strip()
			line_items = line.split('\t')
			if len(line_items) < 2:
				sys.stderr.write("Malformed line, items missing: " + line + '\n')
				continue
			#line info handle
			context = line_items[0].strip()
			word = line_items[-1].strip()
			input_str = word
			if len(line_items)==3:
				input_str = line_items[1].strip()
			if input_str == "" or punctuation.count(word) > 0 or word.isdigit() or word in UPPERS:
				continue

			#Data model input output
			inputs, inputs_len, outputs = reader.get_batch_from_input(context)
			feed_dict={ model.x: inputs, model.x_len: inputs_len}
			prob = sess.run([model.output_prob], feed_dict=feed_dict)
			last_prob = prob[-1][-1]

			#Exp starts
			click_total["total"] += len(word)  # click-rate denominator
			hit_total["total"] += 1
			input_len = len(input_str)
			tmp_click_count = {1: input_len, 3: input_len, 5: input_len}  # per-cutoff click counts for top1/top3/top5
			tmp_hit_count = {1: 0, 3: 0, 5: 0}
			for i in range(1, len(input_str) + 1):
				input_header = input_str[:i]
				top5_ind = get_header_prob(header_map,last_prob,input_header,5)
				words = [reader.words[j] for j in top5_ind]
				if len(words)==0:
					words.append(input_header)
				for idx in [1, 3, 5]:
					if word in words[:idx]:
						if tmp_click_count[idx]==input_len:
							tmp_click_count[idx] = i
						if i == 1:
							tmp_hit_count[idx] = 1
				print("Context is : "+context)
				print("Input is : "+input_header)
				print("Expected word is : " + word)
				print("Word_predict is : "+"#".join(words))
				if word in words[:1]:
					break
			for idx in [1,3,5]:
				click_total[idx] += tmp_click_count[idx]
				hit_total[idx] += tmp_hit_count[idx]
		
		print(filepath + "'s type-rate is >>> "
			"top5_type-rate is : %.2f%%, top3_type-rate is : %.2f%%, top1_type-rate is : %.2f%%, "
			"top5_type_total is : %f, top3_type_total is : %f, top1_type_total is : %f, "
			"total type count is : %f."
			% (click_total[5] * 100 / click_total['total'],
				click_total[3] * 100 / click_total['total'],
				click_total[1] * 100 / click_total['total'],
				click_total[5], click_total[3], click_total[1], click_total['total']))
		print(filepath + "'s uni-input-hit-rate is >>> "
			"top5_hit_rate is : %.2f%%, top3_hit_rate is : %.2f%%, top1_hit_rate is : %.2f%%, "
			"top5_hit_total is %f, top3_hit_total is %f, top1_hit_total is %f, "
			"total count is : %f."
			% (hit_total[5] * 100 / hit_total['total'],
				hit_total[3] * 100 / hit_total['total'],
				hit_total[1] * 100 / hit_total['total'],
				hit_total[5], hit_total[3], hit_total[1], hit_total['total']))
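get_header_prob is called above but not shown. A minimal sketch, assuming header_map maps a typed prefix to the vocabulary indices of words starting with that prefix, and that candidates are ranked by the model's output probability (both assumptions, not confirmed by the original):

def get_header_prob(header_map, last_prob, input_header, k):
	# Vocabulary indices whose words share the typed prefix (assumption).
	candidates = header_map.get(input_header, [])
	# Rank candidates by model probability, highest first, and keep the top k.
	return sorted(candidates, key=lambda j: last_prob[j], reverse=True)[:k]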