Example #1
    def __getitem__(self, index):
        # Shortest-path distances from node `index` to every other node,
        # computed on demand and memoized in self.cache when one is provided.
        h = None
        if self.cache is None or index not in self.cache:
            h = gh.djikstra_wrapper((self.graph, [index]))
            if self.cache is not None:
                self.cache[index] = h
            #logging.info(f"info {index}")
        else:
            h = self.cache[index]
            #logging.info(f"hit {index}")

        # Pair `index` with every other node and gather the matching distances.
        idx = torch.tensor([(index, j) for j in range(self.n) if j != index],
                           dtype=torch.long)
        v = torch.DoubleTensor(h).view(-1)[idx[:, 1]]
        return (idx, v)
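
A minimal, self-contained sketch of how a dataset with this kind of `__getitem__` might be built and consumed. The `DistanceRowDataset` class, the toy path graph, and the use of `scipy.sparse.csgraph.dijkstra` in place of `gh.djikstra_wrapper` are illustrative assumptions, not part of the example above.

import numpy as np
import scipy.sparse
import scipy.sparse.csgraph
import torch
from torch.utils.data import Dataset, DataLoader

class DistanceRowDataset(Dataset):
    """Toy stand-in: one item = all (index, j) pairs plus their graph distances."""
    def __init__(self, graph):
        self.graph = graph            # scipy.sparse adjacency matrix
        self.n = graph.shape[0]
        self.cache = {}               # node index -> distance row (lazy memoization)

    def __len__(self):
        return self.n

    def __getitem__(self, index):
        if index not in self.cache:
            # stand-in for gh.djikstra_wrapper((self.graph, [index]))
            self.cache[index] = scipy.sparse.csgraph.dijkstra(self.graph,
                                                              indices=[index])
        h = self.cache[index]
        idx = torch.tensor([(index, j) for j in range(self.n) if j != index],
                           dtype=torch.long)
        v = torch.DoubleTensor(h).view(-1)[idx[:, 1]]
        return idx, v

# 4-node path graph 0 - 1 - 2 - 3, symmetric adjacency with unit weights
rows, cols = [0, 1, 1, 2, 2, 3], [1, 0, 2, 1, 3, 2]
G = scipy.sparse.csr_matrix((np.ones(6), (rows, cols)), shape=(4, 4))

loader = DataLoader(DistanceRowDataset(G), batch_size=2)
for idx, v in loader:
    print(idx.shape, v.shape)   # torch.Size([2, 3, 2]) torch.Size([2, 3])

Because every row yields the same number of pairs (n - 1), the default DataLoader collation can stack items without any custom padding.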
Example #2
    def __getitem__(self, index):
        if index not in self.cache:
            if self.verbose: logging.info(f"Cache miss for {index}")
            # Distance row for `index`: use the precomputed matrix Z when
            # available, otherwise run single-source shortest paths on the graph.
            h = gh.djikstra_wrapper(
                (self.graph,
                 [index]))[0, :] if self.Z is None else self.Z[index, :]
            # add in all the true edges (neighbors) first
            cur = 0
            self.idx_cache[index, :, 0] = index
            neighbors = scipy.sparse.find(self.graph[index, :])[1]
            for e in neighbors:
                self.idx_cache[index, cur, 1] = int(e)
                self.val_cache[index, cur] = self.scale * h[e]
                self.w_cache[index, cur] = self.weight_fn(1.0)
                cur += 1
                if cur >= self.nbr_frac * self.subsample: break

            # Fill the remaining slots with randomly chosen non-neighbor nodes.
            scratch = np.array(range(self.n))
            np.random.shuffle(scratch)

            i = 0
            while cur < self.subsample and i < self.n:
                v = scratch[i]
                if v != index and v not in neighbors:
                    self.idx_cache[index, cur, 1] = int(v)
                    self.val_cache[index, cur] = self.scale * h[v]
                    # self.val_cache[index,cur]   = 0
                    self.w_cache[index, cur] = self.weight_fn(h[v])
                    cur += 1
                i += 1
            if self.verbose:
                logging.info(
                    f"\t neighbors={neighbors} {self.idx_cache[index,:,1].numpy().T}"
                )
            self.cache.add(index)
            self.n_cached += 1
            # if self.n_cached % (max(self.n//20,1)) == 0: logging.info(f"\t Cached {self.n_cached} of {self.n}")

        # print("GraphRowSubSampler: idx shape ", self.idx_cache[index,:].size())
        return (self.idx_cache[index, :], self.val_cache[index, :],
                self.w_cache[index, :])
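
For orientation, a rough sketch of the preallocated per-row buffers such a sub-sampler relies on and of how the three returned tensors stack under a DataLoader. The shapes, the `CachedRows` wrapper, and the batch size are assumptions for illustration, not code from the class above.

import torch
from torch.utils.data import Dataset, DataLoader

# Assumed buffer shapes for a graph with n nodes and `subsample` pairs per row.
n, subsample = 9, 4
idx_cache = torch.zeros(n, subsample, 2, dtype=torch.long)  # (source, target) node pairs
val_cache = torch.zeros(n, subsample)                       # scaled graph distances
w_cache = torch.zeros(n, subsample)                         # per-pair loss weights

class CachedRows(Dataset):
    """Thin wrapper returning the three cached rows for a node, as above."""
    def __len__(self):
        return n

    def __getitem__(self, index):
        return idx_cache[index], val_cache[index], w_cache[index]

# The default collation stacks each of the three tensors along a batch dimension.
for idx, val, w in DataLoader(CachedRows(), batch_size=3):
    print(idx.shape, val.shape, w.shape)   # (3, 4, 2), (3, 4), (3, 4)

Preallocating the buffers and tracking filled rows in a set keeps repeated accesses to the same node cheap after the first pass, since only the first access pays for the shortest-path computation.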