Example #1
import torch

# `utils` and `args` are module-level names from the surrounding project.
def _eval_by_rel(questions, model, config, pred2idx, argw2idx,
                 relation_config):
    # Evaluate multiple-choice questions with a TransE-style event model:
    # embed the head event e1 of each question, score every candidate tail
    # under the question's relation type, and predict the candidate with the
    # lowest translational distance.
    embs1, ridxs, ys = [], [], []
    all_cins = []
    for q in questions:
        # Raw index representation of the head event, plus the gold answer.
        x1 = utils.get_raw_event_repr(q.rel.e1, config, pred2idx, argw2idx)
        embs1.append(x1)
        ridxs.append(q.rel.rtype_idx)
        ys.append(q.ans_idx)

        # Raw index representations of all candidate events.
        cins = []
        for c in q.choices:
            cin = utils.get_raw_event_repr(c, config, pred2idx, argw2idx)
            cins.append(cin.tolist())
        all_cins.append(cins)

    embs1 = torch.stack(embs1, dim=0).to(args.device)
    ridxs = torch.LongTensor(ridxs).to(args.device)
    ys = torch.LongTensor(ys).to(args.device)
    # Project the head embeddings into the relation-specific space and fetch
    # the relation embeddings.
    embs1 = model._transfer(model.embed_event(embs1), ridxs)
    rembs = model.rel_embeddings(ridxs)

    all_cins = torch.LongTensor(all_cins).to(args.device)
    n_choices = len(questions[0].choices)
    # Keep the score matrix on the same device as ys so the returned
    # prediction can be compared with the gold labels directly.
    scores = torch.zeros((n_choices, len(questions)),
                         dtype=torch.float32).to(args.device)
    for i in range(n_choices):
        # Score the i-th candidate of every question in one batch.
        cembs = model.embed_event(all_cins[:, i, :])
        _score = model._calc(embs1, model._transfer(cembs, ridxs), rembs)
        score = torch.sum(_score, 1)
        if model.norm > 1:
            score = torch.pow(score, 1.0 / model.norm)
        scores[i] = score
    # Lower distance means a better fit, so take the minimum over choices.
    _, y_predict = torch.min(scores, 0)
    return ys, y_predict
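
# A minimal, self-contained sketch of the translational scoring that the loop
# above appears to compute. The exact definition of model._calc is not shown
# in this example, so treating it as |h + r - t| summed over the embedding
# dimension is an assumption for illustration only.
import torch

def transe_distance(h, r, t, norm=2):
    # Batch score ||h + r - t||_norm; lower means the triple fits better,
    # which is why _eval_by_rel picks the candidate with torch.min.
    score = torch.sum(torch.abs(h + r - t) ** norm, dim=1)
    return score ** (1.0 / norm) if norm > 1 else score

h, r, t = (torch.randn(4, 8) for _ in range(3))
print(transe_distance(h, r, t))   # one score per triple in the batch
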
Example #2
import torch

# `utils`, `args`, and `event_to_we` are module-level names from the
# surrounding project.
def build_examples(questions, config, pred2idx, argw2idx):
    # Build a relation-classification dataset: each input is the concatenated
    # raw representations of the two events, each label the relation type.
    Xs, ys = [], []
    for q in questions:
        x1 = utils.get_raw_event_repr(q.rel.e1, config, pred2idx, argw2idx)
        x2 = utils.get_raw_event_repr(q.rel.e2, config, pred2idx, argw2idx)
        Xs.append(torch.cat((x1, x2), 0))
        ys.append(q.rel.rtype_idx)
    Xs = torch.stack(Xs, dim=0).to(args.device)
    ys = torch.LongTensor(ys).to(args.device)
    return Xs, ys
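
# Hypothetical usage sketch: the (Xs, ys) pairs returned by build_examples form
# a relation-type classification dataset. The classifier, sizes, and training
# step below are stand-ins for illustration, not part of the original code.
import torch
import torch.nn as nn

vocab_size, emb_dim, n_rtypes = 5000, 64, 10          # assumed sizes
clf = nn.Sequential(nn.EmbeddingBag(vocab_size, emb_dim),
                    nn.Linear(emb_dim, n_rtypes))
opt = torch.optim.Adam(clf.parameters())

Xs = torch.randint(0, vocab_size, (32, 12))           # stand-in for build_examples output
ys = torch.randint(0, n_rtypes, (32,))
loss = nn.functional.cross_entropy(clf(Xs), ys)
opt.zero_grad()
loss.backward()
opt.step()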
def build_embeddings(model, questions, config, pred2idx, argw2idx, rtype, we):
    # Assign a row index to every distinct event (keyed by its repr), covering
    # the first context event of each question and all candidate events.
    e2idx = {}
    idx = 0
    for q in questions:
        key = repr(q.echain[0])
        if key not in e2idx:
            e2idx[key] = idx
            idx += 1

        for clist in q.choice_lists:
            for c in clist:
                key = repr(c)
                if key not in e2idx:
                    e2idx[key] = idx
                    idx += 1

    # One row per event: raw index inputs for the model and a word-vector
    # representation built from the pretrained embeddings `we`.
    e_len = 1 + config['arg0_max_len'] + config['arg1_max_len']
    inputs = torch.zeros((len(e2idx), e_len),
                         dtype=torch.int64).to(args.device)
    wdim = we[next(iter(we.keys()))].shape[0]
    w_embeddings = torch.zeros((len(e2idx), wdim),
                               dtype=torch.float32).to(args.device)

    for q in questions:
        key = repr(q.echain[0])
        idx = e2idx[key]
        inputs[idx] = utils.get_raw_event_repr(q.echain[0],
                                               config,
                                               pred2idx,
                                               argw2idx,
                                               device=args.device,
                                               use_head=args.use_head)
        w_embeddings[idx] = event_to_we(q.echain[0], we, wdim)

        for clist in q.choice_lists:
            for c in clist:
                key = repr(c)
                idx = e2idx[key]
                inputs[idx] = utils.get_raw_event_repr(c,
                                                       config,
                                                       pred2idx,
                                                       argw2idx,
                                                       device=args.device,
                                                       use_head=args.use_head)
                w_embeddings[idx] = event_to_we(c, we, wdim)

    # Embed all events with the model and project them into the space of the
    # given relation type.
    ev_embeddings = model._transfer(model.embed_event(inputs), rtype)
    return e2idx, ev_embeddings, w_embeddings
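
# Illustrative note: the two tables returned above are row-aligned through
# e2idx, so per-event features can be combined by indexing both with the same
# key. The key and dimensions below are stand-ins.
import torch

ev_embeddings = torch.randn(5, 32)                    # model-based embeddings
w_embeddings = torch.randn(5, 300)                    # word-vector representations
e2idx = {"event_repr_string": 3}                      # hypothetical key
row = e2idx["event_repr_string"]
features = torch.cat((ev_embeddings[row], w_embeddings[row]), dim=0)
print(features.shape)                                 # torch.Size([332])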
def build_embeddings(model, questions, config, pred2idx, argw2idx, rtype):
    # Assign a row index to every distinct event (keyed by its repr) among the
    # context events and the candidate choices of all questions.
    e2idx = {}
    idx = 0
    for q in questions:
        for ctx in q.get_contexts():
            key = repr(ctx)
            if key not in e2idx:
                e2idx[key] = idx
                idx += 1

        for ch in q.choices:
            key = repr(ch)
            if key not in e2idx:
                e2idx[key] = idx
                idx += 1

    # One row of raw event indices per distinct event.
    e_len = 1 + config['arg0_max_len'] + config['arg1_max_len']
    inputs = torch.zeros((len(e2idx), e_len),
                         dtype=torch.int64).to(args.device)

    for q in questions:
        for e in q.get_contexts():
            idx = e2idx[repr(e)]
            inputs[idx] = utils.get_raw_event_repr(e,
                                                   config,
                                                   pred2idx,
                                                   argw2idx,
                                                   device=args.device,
                                                   use_head=args.use_head)
        for e in q.choices:
            idx = e2idx[repr(e)]
            inputs[idx] = utils.get_raw_event_repr(e,
                                                   config,
                                                   pred2idx,
                                                   argw2idx,
                                                   device=args.device,
                                                   use_head=args.use_head)

    # Embed all events and project them into the space of the given relation.
    embeddings = model._transfer(model.embed_event(inputs), rtype)
    return e2idx, embeddings
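
# A self-contained sketch of one way the returned table could be used to rank
# a question's choices against its context events; the mean-pooled cosine
# similarity here is an assumption, not taken from the original code.
import torch
import torch.nn.functional as F

embeddings = torch.randn(6, 16)                       # stand-in for the table
ctx_rows, choice_rows = [0, 1], [2, 3, 4, 5]          # rows found via e2idx lookups
ctx_vec = embeddings[ctx_rows].mean(dim=0, keepdim=True)        # (1, dim)
sims = F.cosine_similarity(ctx_vec, embeddings[choice_rows])    # (n_choices,)
print(int(sims.argmax()))                             # index of the best-scoring choice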
def build_ev_embeddings(questions, config, pred2idx, argw2idx, model):
    # Embed every distinct event appearing in the questions (keyed by repr),
    # without tracking gradients since this is only used for evaluation.
    with torch.no_grad():
        idx = 0
        e2idx = {}
        xs = []
        for i_cat in questions.keys():
            for label, q in questions[i_cat]:
                for e in [q.rel.e1, q.rel.e2]:
                    if repr(e) not in e2idx:
                        e2idx[repr(e)] = idx
                        idx += 1
                        xs.append(utils.get_raw_event_repr(e, config,
                                                           pred2idx, argw2idx))
        xs = torch.stack(xs, dim=0).to(args.device)
        embeddings = model.embed_event(xs)
    return e2idx, embeddings
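
# A minimal sketch of the dedup-by-repr pattern shared by the build_* helpers
# above: each distinct event gets exactly one row, so events repeated across
# questions and categories are embedded only once. Plain strings stand in for
# event objects here.
events = ["buy(x, car)", "sell(y, car)", "buy(x, car)"]
e2idx, rows = {}, []
for e in events:
    key = repr(e)
    if key not in e2idx:
        e2idx[key] = len(e2idx)
        rows.append(e)            # would hold get_raw_event_repr(...) tensors
print(len(e2idx), len(rows))      # 2 2 -- the duplicate event is skipped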