def compute(self, data, scale_res=True): res = {} for method in self.method: res["ruber_refer_bert_%s" % method] = [] for tmp_data in data: origin_candidate = tmp_data['candidate'] origin_reference = tmp_data['reference'] if not isinstance(origin_reference, list): origin_reference = [origin_reference] tmp_res = {} for method in self.method: tmp_res[method] = [] for r in origin_reference: cand_tokens = torch.tensor(self.tokenizer.encode(origin_candidate)).unsqueeze(0) ref_tokens = torch.tensor(self.tokenizer.encode(r)).unsqueeze(0) if torch.cuda.is_available(): cand_tokens = cand_tokens.cuda() ref_tokens = ref_tokens.cuda() cand_embed_list = torch.squeeze(self.model(cand_tokens)[0]).detach().cpu().numpy().tolist() ref_embed_list = torch.squeeze(self.model(ref_tokens)[0]).detach().cpu().numpy().tolist() for method in self.method: tmp_res[method].append( self.score(cand_embed_list=cand_embed_list, ref_embed_list=ref_embed_list, pooling_type=method)) for method in self.method: res["ruber_refer_bert_%s" % method].append(max(tmp_res[method])) if scale_res: for key in res: res[key] = normalize_score(res[key]) return res
def compute(self, data, scale_res=True): res = {} for method in self.method: res["%s_%s" % (self.name, method)] = [] for k, tmp_data in enumerate(data): st = time.time() origin_candidate = tmp_data['candidate'] origin_reference = tmp_data['reference'] if not isinstance(origin_reference, list): origin_reference = [origin_reference] tmp_res = {} for method in self.method: tmp_res[method] = [] for r in origin_reference: cand_embed_list = sent2vec(tokenize(origin_candidate, self.tokenizer), self.embed) ref_embed_list = sent2vec(tokenize(r, self.tokenizer), self.embed) for method in self.method: tmp_res[method].append( self.score(cand_embed_list=cand_embed_list, ref_embed_list=ref_embed_list, pooling_type=method)) for method in self.method: res["%s_%s" % (self.name, method)].append(max(tmp_res[method])) if scale_res: for key in res: res[key] = normalize_score(res[key]) return res
def compute(self, data, scale_res=True):
    # Unreferenced RUBER score with BERT features: restore (or freshly initialize)
    # the TensorFlow scorer, feed it the pooled representations built by
    # self.pro_data, and optionally rescale the resulting scores.
    ruber_bert_graph = tf.Graph()
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    res = {}
    with tf.Session(config=config, graph=ruber_bert_graph) as sess:
        model = ruber_unrefer_model_bert(hidden_dim=self.model.config.hidden_size)
        model.print_parameters()
        if tf.train.get_checkpoint_state(self.model_path):
            print("Reading model parameters from %s" % (self.model_path))
            model.saver.restore(sess, tf.train.latest_checkpoint(self.model_path))
        else:
            print("Created model with fresh parameters.")
            sess.run(tf.global_variables_initializer())
        data_ = self.pro_data(data, pooling_type=self.method)
        if scale_res:
            score_list = normalize_score(self.score(model, sess, data_))
        else:
            score_list = self.score(model, sess, data_)
        res["ruber_unrefer_bert_%s" % self.method] = score_list
    return res
def compute(self, data, scale_res=True):
    # Unreferenced RUBER score: restore (or freshly initialize) the TensorFlow
    # scorer and run it on the preprocessed query/reply pairs.
    ruber_graph = tf.Graph()
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    data = self.pro_data(data)
    with tf.Session(config=config, graph=ruber_graph) as sess:
        model = ruber_unrefer_model(
            vocab_size=self.vocab_size,
            embed_dim=self.embed_dim,
            hidden_dim=self.hidden_dim)
        model.print_parameters()
        if tf.train.get_checkpoint_state(self.model_path):
            print("Reading model parameters from %s" % (self.model_path))
            model.saver.restore(sess, tf.train.latest_checkpoint(self.model_path))
        else:
            print("Created model with fresh parameters.")
            sess.run(tf.global_variables_initializer())
        if scale_res:
            score_list = normalize_score(self.score(model, sess, data))
        else:
            score_list = self.score(model, sess, data)
    return {"ruber_unrefer": score_list}
def compute(self, data):
    # Hybrid RUBER: keep the individual referenced and unreferenced scores, then
    # combine every referenced/unreferenced pair under each combination strategy
    # in self.method.
    ruber_refer_score = self.ruber_refer.compute(data)
    ruber_unrefer_score = self.ruber_unrefer.compute(data)
    res = {}
    res.update(ruber_refer_score)
    res.update(ruber_unrefer_score)
    for method in self.method:
        for key1 in ruber_refer_score:
            for key2 in ruber_unrefer_score:
                ruber_score = self.hybird_score(ruber_refer_score[key1],
                                                ruber_unrefer_score[key2],
                                                method=method)
                res["%s_%s (with %s, %s)" % (
                    "ruber_bert" if self.bert else "ruber",
                    method, key1, key2)] = normalize_score(ruber_score)
    return res
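# Illustrative input format only. The dict keys ('candidate', 'reference') are exactly
# the ones read by the compute() methods above; 'reference' may be a single string or a
# list of strings, in which case the maximum score over the references is reported. The
# variable `metric` in the commented call is a hypothetical instance of one of the
# metric classes these methods belong to.
example_data = [
    {"candidate": "i am fine , thank you .", "reference": "i am good , thanks ."},
    {"candidate": "see you later .", "reference": ["bye .", "see you soon ."]},
]
# res = metric.compute(example_data)  # -> {metric_name: [per-sample scores]}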