Example 1
import torch

# fast_negative_sampling is assumed to be defined elsewhere in this module.
def link_prediction(data,
                    partition_fn,
                    zs,
                    start=0,
                    end=None,
                    include_tr=True,
                    batched=False,
                    nratio=1):
    """Build per-time-step positive edges and sampled negative edges for
    link prediction, returned alongside the embeddings zs."""
    if batched:
        raise NotImplementedError("Sorry, batching is a TODO")

    end = end if end is not None else data.T
    negs = []

    # Default to the full edge index at each time step
    if partition_fn is None:
        partition_fn = lambda x: data.eis[x]

    for t in range(start, end):
        ei = tp = partition_fn(t)

        # Also avoid sampling negatives that are edges in the training set
        # (assuming this is the val or test split calling the function)
        if include_tr:
            ei = torch.cat([ei, data.tr(t)], dim=1)

        # Sample nratio negatives per positive edge
        neg = fast_negative_sampling(ei, int(tp.size(1) * nratio),
                                     data.num_nodes)
        negs.append(neg)

    return [partition_fn(i) for i in range(start, end)], negs, zs
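
All four examples call fast_negative_sampling(ei, num_samples, num_nodes), which is not shown here. Below is a minimal, hypothetical sketch of such a helper, assuming it rejection-samples random (src, dst) pairs and discards any that already appear in the supplied edge index; it matches the call sites above only in name and signature, not necessarily the original implementation.

import torch

def fast_negative_sampling(ei, num_samples, num_nodes):
    # Hypothetical rejection sampler: draw random (src, dst) pairs and keep
    # those that do not collide with edges already present in ei
    existing = set(map(tuple, ei.t().tolist()))
    neg = []
    while len(neg) < num_samples:
        src = torch.randint(0, num_nodes, (num_samples,)).tolist()
        dst = torch.randint(0, num_nodes, (num_samples,)).tolist()
        for s, d in zip(src, dst):
            if (s, d) not in existing and len(neg) < num_samples:
                neg.append((s, d))

    # Return negatives in the same 2 x E edge-index layout as the positives
    return torch.tensor(neg, dtype=torch.long).t()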
Example 2
def lightweight_lp(eis, num_nodes, nratio=1, num_pos=None):
    """Sample negative edges for each time step's edge index. Note that
    nratio is accepted but not used in this lighter-weight variant."""
    negs = []

    # Negatives per time step: match the number of positives unless an
    # explicit per-step count is provided
    pos = lambda i: eis[i].size(1) if num_pos is None else num_pos[i]

    for i in range(len(eis)):
        ei = eis[i]
        negs.append(fast_negative_sampling(ei, pos(i), num_nodes))

    return negs
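
A quick illustration of how lightweight_lp might be called, using toy per-time-step edge indices (the data below is illustrative, not from the original repository):

import torch

# Two toy time steps over 4 nodes (illustrative only)
eis = [
    torch.tensor([[0, 1, 2], [1, 2, 3]]),
    torch.tensor([[0, 2], [3, 1]]),
]

negs = lightweight_lp(eis, num_nodes=4)
# By default negs[i] has as many sampled edges as eis[i] has positives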
Example 3
def get_sample(data, enum, nsize=TR_PARAMS['neg_samples']):
    # Positive (masked) edges for the requested split at each time step
    ps = [data.ei_masked(enum, i) for i in range(data.T)]

    # For each time step, sample negatives proportional to the number of
    # positive edges
    ns = [
        fast_negative_sampling(
            ps[i],
            int(ps[i].size(1) * nsize),
            data.num_nodes,
        ) for i in range(data.T)
    ]

    return ps, ns
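
A possible call, assuming data exposes T, num_nodes, and ei_masked(enum, t) as used above, enum identifies the split (e.g. a train/val/test constant), and TR_PARAMS is a module-level config dict:

ps, ns = get_sample(data, enum)

# Each time step keeps the negative-to-positive ratio fixed by nsize
for t in range(data.T):
    assert ns[t].size(1) == int(ps[t].size(1) * TR_PARAMS['neg_samples'])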
Example 4
import torch
from torch_geometric.utils import to_dense_adj, dense_to_sparse

def dynamic_new_link_prediction(data,
                                partition_fn,
                                zs,
                                start=0,
                                end=None,
                                include_tr=True,
                                batched=False):
    """Build positive/negative edge sets for *new* link prediction: only
    edges that appear at time t but were absent at time t-1 count as
    positives."""
    if batched:
        raise NotImplementedError("Sorry, batching is a TODO")

    p, n = [], []
    b = None

    if partition_fn is None:
        partition_fn = lambda x: data.eis[x]

    end = end if end is not None else data.T

    for i in range(start, end):
        # Use the full adj matrix for new link pred
        ei = partition_fn(i)

        # a holds the previous step's adjacency, b the current one
        a = b
        b = to_dense_adj(ei, max_num_nodes=data.num_nodes)[0].bool()

        # Nothing to compare against on the first iteration
        if a is None:
            continue

        # New links: present in b (current step) but absent from a
        # (previous step)
        new_links = (~a).logical_and(a.logical_or(b))
        new_links, _ = dense_to_sparse(new_links)

        p.append(new_links)
        n.append(fast_negative_sampling(ei, p[-1].size(1), data.num_nodes))

    # Drop the last embedding so zs[i] lines up with the links that are
    # new at the following time step
    return p, n, zs[:-1]
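
To show how the returned triple lines up, here is a hedged sketch of scoring the new-link positives and negatives with a simple inner-product decoder; the decoder is an illustration (not from the source) and assumes zs holds one node-embedding matrix per time step:

import torch

def score(z, ei):
    # Inner-product decoder over the node pairs in ei (illustrative)
    return torch.sigmoid((z[ei[0]] * z[ei[1]]).sum(dim=1))

p, n, z_list = dynamic_new_link_prediction(data, None, zs)

# z_list[i] is scored against the links that are new at the next time step
pos_scores = [score(z, ei) for z, ei in zip(z_list, p)]
neg_scores = [score(z, ei) for z, ei in zip(z_list, n)]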